Dec 02 00:06:12 crc systemd[1]: Starting Kubernetes Kubelet... Dec 02 00:06:12 crc restorecon[4700]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 00:06:12 crc 
restorecon[4700]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 00:06:12 crc 
restorecon[4700]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc 
restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc 
restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 00:06:12 
crc restorecon[4700]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 
00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 
00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 00:06:12 crc 
restorecon[4700]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 
00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 
00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc 
restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 00:06:12 crc restorecon[4700]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 00:06:12 crc restorecon[4700]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 00:06:12 crc restorecon[4700]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 02 00:06:13 crc kubenswrapper[4856]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 02 00:06:13 crc kubenswrapper[4856]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 02 00:06:13 crc kubenswrapper[4856]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 02 00:06:13 crc kubenswrapper[4856]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
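The deprecation warnings above say that several command-line flags (--container-runtime-endpoint, --volume-plugin-dir, --register-with-taints, --system-reserved, and --minimum-container-ttl-duration) are expected to move into the KubeletConfiguration file passed via the kubelet's --config flag. A minimal sketch of such a file follows, assuming a CRI-O runtime socket and placeholder reservation, path, and taint values; the field names come from the upstream kubelet.config.k8s.io/v1beta1 API that the messages link to, but none of the values below are taken from this cluster and they are illustrative only.

  apiVersion: kubelet.config.k8s.io/v1beta1
  kind: KubeletConfiguration
  # Replaces --container-runtime-endpoint (assumed CRI-O socket path; adjust for the actual runtime)
  containerRuntimeEndpoint: "unix:///var/run/crio/crio.sock"
  # Replaces --volume-plugin-dir (placeholder path)
  volumePluginDir: "/etc/kubernetes/kubelet-plugins/volume/exec"
  # Replaces --system-reserved (placeholder reservations)
  systemReserved:
    cpu: "500m"
    memory: "1Gi"
  # Replaces --register-with-taints (placeholder taint)
  registerWithTaints:
    - key: "node-role.kubernetes.io/master"
      effect: "NoSchedule"
  # Per the --minimum-container-ttl-duration warning, eviction thresholds are the replacement (placeholder value)
  evictionHard:
    memory.available: "100Mi"

Note that containerRuntimeEndpoint is only accepted as a config-file field in recent kubelet releases; on older releases the command-line flag remains the only way to set it, which is likely why it still appears as a flag here.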
Dec 02 00:06:13 crc kubenswrapper[4856]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 02 00:06:13 crc kubenswrapper[4856]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.083139 4856 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086327 4856 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086343 4856 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086348 4856 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086351 4856 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086355 4856 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086359 4856 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086363 4856 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086367 4856 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086371 4856 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086375 4856 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086380 4856 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086385 4856 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086389 4856 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086393 4856 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086397 4856 feature_gate.go:330] unrecognized feature gate: Example Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086401 4856 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086404 4856 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086408 4856 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086411 4856 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086415 4856 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086420 4856 feature_gate.go:330] 
unrecognized feature gate: HardwareSpeed Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086423 4856 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086427 4856 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086430 4856 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086434 4856 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086437 4856 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086441 4856 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086444 4856 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086448 4856 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086451 4856 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086455 4856 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086460 4856 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086466 4856 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086470 4856 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086474 4856 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086478 4856 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086482 4856 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086487 4856 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086492 4856 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086497 4856 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086500 4856 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086505 4856 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086510 4856 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086515 4856 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086518 4856 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086522 4856 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086525 4856 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086529 4856 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086532 4856 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086535 4856 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086539 4856 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086542 4856 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086546 4856 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086549 4856 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086553 4856 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086556 4856 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086560 4856 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086564 4856 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086568 4856 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086571 4856 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086575 4856 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086578 4856 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086582 4856 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086599 4856 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086603 4856 
feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086606 4856 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086610 4856 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086614 4856 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086618 4856 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086623 4856 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.086626 4856 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086836 4856 flags.go:64] FLAG: --address="0.0.0.0" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086848 4856 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086857 4856 flags.go:64] FLAG: --anonymous-auth="true" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086863 4856 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086868 4856 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086873 4856 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086878 4856 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086883 4856 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086887 4856 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086892 4856 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086897 4856 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086902 4856 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086906 4856 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086910 4856 flags.go:64] FLAG: --cgroup-root="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086915 4856 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086918 4856 flags.go:64] FLAG: --client-ca-file="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086922 4856 flags.go:64] FLAG: --cloud-config="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086926 4856 flags.go:64] FLAG: --cloud-provider="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086930 4856 flags.go:64] FLAG: --cluster-dns="[]" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086935 4856 flags.go:64] FLAG: --cluster-domain="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086939 4856 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086944 4856 flags.go:64] FLAG: --config-dir="" 
Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086948 4856 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086953 4856 flags.go:64] FLAG: --container-log-max-files="5" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086958 4856 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086962 4856 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086967 4856 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086971 4856 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086975 4856 flags.go:64] FLAG: --contention-profiling="false" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086980 4856 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086984 4856 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086988 4856 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086992 4856 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.086997 4856 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087001 4856 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087006 4856 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087010 4856 flags.go:64] FLAG: --enable-load-reader="false" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087014 4856 flags.go:64] FLAG: --enable-server="true" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087019 4856 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087024 4856 flags.go:64] FLAG: --event-burst="100" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087029 4856 flags.go:64] FLAG: --event-qps="50" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087033 4856 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087037 4856 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087042 4856 flags.go:64] FLAG: --eviction-hard="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087047 4856 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087051 4856 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087056 4856 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087060 4856 flags.go:64] FLAG: --eviction-soft="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087064 4856 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087068 4856 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087072 4856 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 
00:06:13.087076 4856 flags.go:64] FLAG: --experimental-mounter-path="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087080 4856 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087083 4856 flags.go:64] FLAG: --fail-swap-on="true" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087088 4856 flags.go:64] FLAG: --feature-gates="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087092 4856 flags.go:64] FLAG: --file-check-frequency="20s" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087097 4856 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087101 4856 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087105 4856 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087109 4856 flags.go:64] FLAG: --healthz-port="10248" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087113 4856 flags.go:64] FLAG: --help="false" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087118 4856 flags.go:64] FLAG: --hostname-override="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087122 4856 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087126 4856 flags.go:64] FLAG: --http-check-frequency="20s" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087130 4856 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087134 4856 flags.go:64] FLAG: --image-credential-provider-config="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087137 4856 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087142 4856 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087146 4856 flags.go:64] FLAG: --image-service-endpoint="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087150 4856 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087154 4856 flags.go:64] FLAG: --kube-api-burst="100" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087158 4856 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087162 4856 flags.go:64] FLAG: --kube-api-qps="50" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087166 4856 flags.go:64] FLAG: --kube-reserved="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087170 4856 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087174 4856 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087178 4856 flags.go:64] FLAG: --kubelet-cgroups="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087182 4856 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087186 4856 flags.go:64] FLAG: --lock-file="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087190 4856 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087195 4856 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087199 4856 flags.go:64] FLAG: 
--log-json-info-buffer-size="0" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087205 4856 flags.go:64] FLAG: --log-json-split-stream="false" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087209 4856 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087213 4856 flags.go:64] FLAG: --log-text-split-stream="false" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087218 4856 flags.go:64] FLAG: --logging-format="text" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087222 4856 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087226 4856 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087230 4856 flags.go:64] FLAG: --manifest-url="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087234 4856 flags.go:64] FLAG: --manifest-url-header="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087240 4856 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087244 4856 flags.go:64] FLAG: --max-open-files="1000000" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087249 4856 flags.go:64] FLAG: --max-pods="110" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087254 4856 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087258 4856 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087262 4856 flags.go:64] FLAG: --memory-manager-policy="None" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087266 4856 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087270 4856 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087275 4856 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087279 4856 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087288 4856 flags.go:64] FLAG: --node-status-max-images="50" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087292 4856 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087296 4856 flags.go:64] FLAG: --oom-score-adj="-999" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087300 4856 flags.go:64] FLAG: --pod-cidr="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087304 4856 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087310 4856 flags.go:64] FLAG: --pod-manifest-path="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087314 4856 flags.go:64] FLAG: --pod-max-pids="-1" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087318 4856 flags.go:64] FLAG: --pods-per-core="0" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087322 4856 flags.go:64] FLAG: --port="10250" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087326 4856 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 
00:06:13.087330 4856 flags.go:64] FLAG: --provider-id="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087334 4856 flags.go:64] FLAG: --qos-reserved="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087338 4856 flags.go:64] FLAG: --read-only-port="10255" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087342 4856 flags.go:64] FLAG: --register-node="true" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087345 4856 flags.go:64] FLAG: --register-schedulable="true" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087350 4856 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087356 4856 flags.go:64] FLAG: --registry-burst="10" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087361 4856 flags.go:64] FLAG: --registry-qps="5" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087365 4856 flags.go:64] FLAG: --reserved-cpus="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087368 4856 flags.go:64] FLAG: --reserved-memory="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087373 4856 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087378 4856 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087381 4856 flags.go:64] FLAG: --rotate-certificates="false" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087385 4856 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087389 4856 flags.go:64] FLAG: --runonce="false" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087393 4856 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087398 4856 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087402 4856 flags.go:64] FLAG: --seccomp-default="false" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087406 4856 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087410 4856 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087418 4856 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087422 4856 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087426 4856 flags.go:64] FLAG: --storage-driver-password="root" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087430 4856 flags.go:64] FLAG: --storage-driver-secure="false" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087434 4856 flags.go:64] FLAG: --storage-driver-table="stats" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087438 4856 flags.go:64] FLAG: --storage-driver-user="root" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087442 4856 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087447 4856 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087451 4856 flags.go:64] FLAG: --system-cgroups="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087455 4856 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 
00:06:13.087461 4856 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087465 4856 flags.go:64] FLAG: --tls-cert-file="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087469 4856 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087473 4856 flags.go:64] FLAG: --tls-min-version="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087477 4856 flags.go:64] FLAG: --tls-private-key-file="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087483 4856 flags.go:64] FLAG: --topology-manager-policy="none" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087488 4856 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087492 4856 flags.go:64] FLAG: --topology-manager-scope="container" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087496 4856 flags.go:64] FLAG: --v="2" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087502 4856 flags.go:64] FLAG: --version="false" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087507 4856 flags.go:64] FLAG: --vmodule="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087512 4856 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.087517 4856 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087640 4856 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087645 4856 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087649 4856 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087653 4856 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087657 4856 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087663 4856 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087667 4856 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087671 4856 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087674 4856 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087680 4856 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087684 4856 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087687 4856 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087691 4856 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087695 4856 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087698 4856 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087702 4856 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087705 4856 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087709 4856 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087712 4856 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087715 4856 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087719 4856 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087722 4856 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087726 4856 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087729 4856 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087735 4856 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087739 4856 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087743 4856 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087747 4856 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087750 4856 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087754 4856 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087758 4856 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. 
It will be removed in a future release. Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087762 4856 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087766 4856 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087770 4856 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087774 4856 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087778 4856 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087781 4856 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087785 4856 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087789 4856 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087792 4856 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087796 4856 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087802 4856 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087806 4856 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087809 4856 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087813 4856 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087816 4856 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087820 4856 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087823 4856 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087827 4856 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087831 4856 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087835 4856 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087838 4856 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087842 4856 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087845 4856 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087850 4856 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087854 4856 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087859 4856 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087863 4856 feature_gate.go:330] unrecognized feature gate: Example Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087867 4856 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087871 4856 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087874 4856 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087878 4856 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087881 4856 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087885 4856 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087888 4856 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087892 4856 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087896 4856 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087899 4856 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087903 4856 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087906 4856 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.087909 4856 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.088057 4856 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.097448 4856 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.097487 4856 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.098922 4856 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.098956 4856 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.098967 4856 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.098976 4856 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 02 
00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.098985 4856 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.098995 4856 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099004 4856 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099013 4856 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099030 4856 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099039 4856 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099047 4856 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099056 4856 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099064 4856 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099073 4856 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099083 4856 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099093 4856 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099102 4856 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099110 4856 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099119 4856 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099127 4856 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099136 4856 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099144 4856 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099152 4856 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099161 4856 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099169 4856 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099178 4856 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099187 4856 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099201 4856 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099211 4856 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099223 4856 feature_gate.go:351] Setting deprecated 
feature gate KMSv1=true. It will be removed in a future release. Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099235 4856 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099245 4856 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099255 4856 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099265 4856 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099274 4856 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099282 4856 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099292 4856 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099365 4856 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099373 4856 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099419 4856 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099427 4856 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099753 4856 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099801 4856 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099813 4856 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099822 4856 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099834 4856 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099846 4856 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099856 4856 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099866 4856 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099876 4856 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099887 4856 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099899 4856 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099910 4856 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099920 4856 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099929 4856 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099948 4856 feature_gate.go:330] 
unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099964 4856 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099979 4856 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.099992 4856 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100005 4856 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100019 4856 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100033 4856 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100046 4856 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100062 4856 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100076 4856 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100085 4856 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100095 4856 feature_gate.go:330] unrecognized feature gate: Example Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100104 4856 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100113 4856 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100121 4856 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100130 4856 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.100146 4856 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100430 4856 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100447 4856 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100456 4856 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100465 4856 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100474 4856 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100483 4856 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100491 4856 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100502 4856 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100511 4856 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100521 4856 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100529 4856 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100537 4856 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100545 4856 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100556 4856 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100567 4856 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100577 4856 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100585 4856 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100625 4856 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100635 4856 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100644 4856 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100653 4856 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100661 4856 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100671 4856 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100679 4856 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100688 4856 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100696 4856 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100704 4856 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100712 4856 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100720 4856 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100727 4856 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100736 4856 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100743 4856 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100751 4856 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100759 4856 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100767 4856 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100776 4856 feature_gate.go:330] unrecognized feature gate: Example Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100784 4856 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100793 4856 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100801 4856 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100810 4856 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100818 4856 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100826 4856 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100834 4856 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100841 4856 feature_gate.go:330] unrecognized feature 
gate: PrivateHostedZoneAWS Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100849 4856 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100859 4856 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100867 4856 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100875 4856 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100882 4856 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100891 4856 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100899 4856 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100907 4856 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100915 4856 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100923 4856 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100931 4856 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100940 4856 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100948 4856 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100956 4856 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100964 4856 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100971 4856 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100979 4856 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100987 4856 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.100996 4856 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.101004 4856 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.101011 4856 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.101034 4856 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.101045 4856 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.101053 4856 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.101060 4856 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.101068 4856 
feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.101076 4856 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.101089 4856 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.101667 4856 server.go:940] "Client rotation is on, will bootstrap in background" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.106120 4856 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.106267 4856 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.107115 4856 server.go:997] "Starting client certificate rotation" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.107154 4856 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.107784 4856 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-05 06:19:53.508560376 +0000 UTC Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.108020 4856 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 78h13m40.400549325s for next certificate rotation Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.115759 4856 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.118522 4856 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.131516 4856 log.go:25] "Validated CRI v1 runtime API" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.148575 4856 log.go:25] "Validated CRI v1 image API" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.150300 4856 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.153473 4856 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-02-00-01-00-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.153513 4856 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd 
major:0 minor:43 fsType:tmpfs blockSize:0}] Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.172555 4856 manager.go:217] Machine: {Timestamp:2025-12-02 00:06:13.168999188 +0000 UTC m=+0.195367222 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:3d1824ac-8d4d-4481-a69e-2d81f0b86b53 BootID:5c7b2cda-4eb3-4a4c-9166-bf25d0cae556 Filesystems:[{Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:2d:94:84 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:2d:94:84 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:a4:20:85 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:89:2a:ea Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:36:1f:5d Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:56:e9:0d Speed:-1 Mtu:1496} {Name:eth10 MacAddress:22:50:3f:36:11:f1 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:ee:c6:05:0d:bd:44 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] 
UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.172998 4856 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.173180 4856 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.174048 4856 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.174388 4856 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.174436 4856 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" 
nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.175124 4856 topology_manager.go:138] "Creating topology manager with none policy" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.175157 4856 container_manager_linux.go:303] "Creating device plugin manager" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.175363 4856 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.175406 4856 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.175641 4856 state_mem.go:36] "Initialized new in-memory state store" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.175833 4856 server.go:1245] "Using root directory" path="/var/lib/kubelet" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.176809 4856 kubelet.go:418] "Attempting to sync node with API server" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.176842 4856 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.176867 4856 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.176887 4856 kubelet.go:324] "Adding apiserver pod source" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.176905 4856 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.179054 4856 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.179412 4856 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.180231 4856 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.180238 4856 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.222:6443: connect: connection refused Dec 02 00:06:13 crc kubenswrapper[4856]: E1202 00:06:13.180346 4856 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.222:6443: connect: connection refused" logger="UnhandledError" Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.180231 4856 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.222:6443: connect: connection refused Dec 02 00:06:13 crc kubenswrapper[4856]: E1202 00:06:13.180760 4856 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.222:6443: connect: connection refused" logger="UnhandledError" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.180790 4856 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.181077 4856 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.181211 4856 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.181336 4856 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.181459 4856 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.181573 4856 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.181721 4856 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.181848 4856 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.181991 4856 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.182131 4856 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.182289 4856 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.182406 4856 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.182891 4856 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 
00:06:13.183708 4856 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.222:6443: connect: connection refused Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.183899 4856 server.go:1280] "Started kubelet" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.184019 4856 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.183974 4856 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.184740 4856 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.185911 4856 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.185958 4856 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.185996 4856 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 12:41:40.477782098 +0000 UTC Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.186259 4856 volume_manager.go:287] "The desired_state_of_world populator starts" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.186279 4856 volume_manager.go:289] "Starting Kubelet Volume Manager" Dec 02 00:06:13 crc kubenswrapper[4856]: E1202 00:06:13.186297 4856 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.186372 4856 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Dec 02 00:06:13 crc systemd[1]: Started Kubernetes Kubelet. 
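From this point on, many records report "dial tcp 38.102.83.222:6443: connect: connection refused" while the reflectors, the event recorder, and the node-lease controller retry against api-int.crc.testing. One way to gauge how long that phase lasted is to tally refused connections per target address; the sketch below does that, again assuming Python 3 and a local copy of the log saved as kubelet.log (an assumed file name). It is offered only as a reading aid for logs of this shape.

    import re
    from collections import Counter

    # Matches the "dial tcp <addr>: connect: connection refused" fragment that the
    # client-go reflectors and the lease controller emit while the API server is down.
    REFUSED = re.compile(r'dial tcp ([\d.]+:\d+): connect: connection refused')

    def refused_by_target(log_path="kubelet.log"):  # path is an assumption
        counts = Counter()
        with open(log_path, encoding="utf-8") as f:
            for line in f:
                counts.update(REFUSED.findall(line))
        return counts

    if __name__ == "__main__":
        for target, n in refused_by_target().most_common():
            print(f"{n:6d}  {target}")

Counting per target rather than per log line keeps the tally correct even when several refused-connection records end up concatenated on one physical line, as they are in this capture.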
Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.186904 4856 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.222:6443: connect: connection refused Dec 02 00:06:13 crc kubenswrapper[4856]: E1202 00:06:13.186956 4856 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.222:6443: connect: connection refused" logger="UnhandledError" Dec 02 00:06:13 crc kubenswrapper[4856]: E1202 00:06:13.186238 4856 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.222:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187d3d3fca3361ca default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-02 00:06:13.18370145 +0000 UTC m=+0.210069494,LastTimestamp:2025-12-02 00:06:13.18370145 +0000 UTC m=+0.210069494,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.187681 4856 factory.go:55] Registering systemd factory Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.187713 4856 factory.go:221] Registration of the systemd container factory successfully Dec 02 00:06:13 crc kubenswrapper[4856]: E1202 00:06:13.188041 4856 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.222:6443: connect: connection refused" interval="200ms" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.188266 4856 factory.go:153] Registering CRI-O factory Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.188289 4856 factory.go:221] Registration of the crio container factory successfully Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.188376 4856 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.188405 4856 factory.go:103] Registering Raw factory Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.188428 4856 manager.go:1196] Started watching for new ooms in manager Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.190044 4856 manager.go:319] Starting recovery of all containers Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.191569 4856 server.go:460] "Adding debug handlers to kubelet server" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.208388 4856 manager.go:324] Recovery completed Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.212650 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" 
volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.212738 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.212763 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.212784 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.212803 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.212825 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.212843 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.212861 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.212885 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.212906 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.212929 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.212954 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" 
volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.212982 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.213009 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219098 4856 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219162 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219189 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219210 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219234 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219253 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219272 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219291 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219311 4856 reconstruct.go:130] "Volume is marked as uncertain and added 
into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219330 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219350 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219367 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219388 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219412 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219462 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219482 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219503 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219523 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219543 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219563 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219581 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219640 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219667 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219692 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219715 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219739 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219759 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219781 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219800 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219819 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219839 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219858 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219877 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219895 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219914 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219933 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219951 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219969 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.219988 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220013 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220033 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220098 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" 
volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220120 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220142 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220161 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220181 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220202 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220220 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220239 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220258 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220277 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220296 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220315 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220334 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220354 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220374 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220393 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220414 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220433 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220451 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220471 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220490 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220511 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220531 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220550 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220567 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220586 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220645 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220666 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220685 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220703 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220722 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220741 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220760 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220815 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" 
volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220839 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220859 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220903 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220926 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220945 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220964 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.220982 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221000 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221019 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221039 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221056 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221074 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221102 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221127 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221153 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221181 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221214 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221243 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221269 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221297 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221326 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221354 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" 
volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221383 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221411 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221440 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221468 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221491 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221513 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221532 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221649 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221666 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221676 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221685 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" 
volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221695 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221704 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221716 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221725 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221751 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221761 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221770 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221780 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221790 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221799 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221810 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221819 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221827 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221837 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221847 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221856 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221865 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221874 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221884 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221895 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221905 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221914 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" 
volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221922 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221931 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221940 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221951 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221960 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221968 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221978 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.221987 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222027 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222038 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222048 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" 
volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222056 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222066 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222075 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222084 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222093 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222102 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222110 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222119 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222128 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222136 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222146 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" 
volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222155 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222163 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222172 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222181 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222190 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222203 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222211 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222219 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222228 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222238 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222247 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222255 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222263 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222273 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222281 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222290 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222303 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222312 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222320 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222330 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222339 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222347 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" 
volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222356 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222364 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222373 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222381 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222390 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222398 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222406 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222414 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222423 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222431 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222441 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" 
volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222453 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222462 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222471 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222480 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222487 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222497 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222506 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222515 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222524 4856 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222532 4856 reconstruct.go:97] "Volume reconstruction finished" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.222539 4856 reconciler.go:26] "Reconciler: start to sync state" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.225195 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.227765 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.227795 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.227803 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.228813 4856 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.228826 4856 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.228847 4856 state_mem.go:36] "Initialized new in-memory state store" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.239001 4856 policy_none.go:49] "None policy: Start" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.239800 4856 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.239834 4856 state_mem.go:35] "Initializing new in-memory state store" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.248636 4856 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.250900 4856 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.250952 4856 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.250995 4856 kubelet.go:2335] "Starting kubelet main sync loop" Dec 02 00:06:13 crc kubenswrapper[4856]: E1202 00:06:13.251052 4856 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.251481 4856 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.222:6443: connect: connection refused Dec 02 00:06:13 crc kubenswrapper[4856]: E1202 00:06:13.251531 4856 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.222:6443: connect: connection refused" logger="UnhandledError" Dec 02 00:06:13 crc kubenswrapper[4856]: E1202 00:06:13.286545 4856 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.294940 4856 manager.go:334] "Starting Device Plugin manager" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.295008 4856 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.295023 4856 server.go:79] "Starting device plugin registration server" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.295430 4856 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.295451 4856 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 02 00:06:13 crc 
kubenswrapper[4856]: I1202 00:06:13.295763 4856 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.296991 4856 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.297030 4856 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 02 00:06:13 crc kubenswrapper[4856]: E1202 00:06:13.309064 4856 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.351467 4856 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.351560 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.352448 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.352478 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.352487 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.352656 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.353094 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.353256 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.353417 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.353439 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.353447 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.353523 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.353871 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.353971 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.354190 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.354225 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.354234 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.354380 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.354569 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.354659 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.354682 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.354691 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.354796 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.355801 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.355825 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.355833 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.355866 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.355885 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.355893 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.355988 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.356034 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.356060 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.356073 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.356105 4856 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.356134 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.357452 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.357478 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.357492 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.357522 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.357537 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.357547 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.357683 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.357715 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.358960 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.358984 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.358997 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:13 crc kubenswrapper[4856]: E1202 00:06:13.388737 4856 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.222:6443: connect: connection refused" interval="400ms" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.396163 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.397343 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.397376 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.397387 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.397411 4856 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 00:06:13 crc kubenswrapper[4856]: E1202 00:06:13.397839 4856 kubelet_node_status.go:99] "Unable to register node with API server" err="Post 
\"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.222:6443: connect: connection refused" node="crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.424847 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.424889 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.424916 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.424936 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.424964 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.424989 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.425009 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.425026 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.425048 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.425069 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.425089 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.425110 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.425130 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.425149 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.425168 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526137 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526203 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526227 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526244 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526260 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526273 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526290 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526306 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526323 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526338 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526352 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526367 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526381 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 
02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526395 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526409 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526420 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526483 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526537 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526577 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526647 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526684 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526716 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526725 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" 
Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526749 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526740 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526779 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526761 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526794 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526814 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.526867 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.598383 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.599584 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.599639 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.599651 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.599674 4856 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 00:06:13 crc kubenswrapper[4856]: E1202 00:06:13.600109 4856 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": 
dial tcp 38.102.83.222:6443: connect: connection refused" node="crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.684315 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.694193 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.708868 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-2fd24a0145f997ffa9552ee8bc3d7292dc19efec35933e8a1368cd5b5503f567 WatchSource:0}: Error finding container 2fd24a0145f997ffa9552ee8bc3d7292dc19efec35933e8a1368cd5b5503f567: Status 404 returned error can't find the container with id 2fd24a0145f997ffa9552ee8bc3d7292dc19efec35933e8a1368cd5b5503f567 Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.710317 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-5d5abd82c39eeb0d34558ad30a535253ade94720035408eba74e0b95994f2cef WatchSource:0}: Error finding container 5d5abd82c39eeb0d34558ad30a535253ade94720035408eba74e0b95994f2cef: Status 404 returned error can't find the container with id 5d5abd82c39eeb0d34558ad30a535253ade94720035408eba74e0b95994f2cef Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.711502 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.716228 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: I1202 00:06:13.721210 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.727211 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-ad64921f7c01d9105dd203634a88730f5926401ea37fc6b78d59d874a046568c WatchSource:0}: Error finding container ad64921f7c01d9105dd203634a88730f5926401ea37fc6b78d59d874a046568c: Status 404 returned error can't find the container with id ad64921f7c01d9105dd203634a88730f5926401ea37fc6b78d59d874a046568c Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.737378 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-8643053b613a6cc86dbdc6a27b8eabeb6dd8abce3b52203896b8383eba89c550 WatchSource:0}: Error finding container 8643053b613a6cc86dbdc6a27b8eabeb6dd8abce3b52203896b8383eba89c550: Status 404 returned error can't find the container with id 8643053b613a6cc86dbdc6a27b8eabeb6dd8abce3b52203896b8383eba89c550 Dec 02 00:06:13 crc kubenswrapper[4856]: W1202 00:06:13.743414 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-e085faf3413ea740fc4fd47b88fc0ccfbfb621b966e8d5ceecb1bed7069827f9 WatchSource:0}: Error finding container e085faf3413ea740fc4fd47b88fc0ccfbfb621b966e8d5ceecb1bed7069827f9: Status 404 returned error can't find the container with id e085faf3413ea740fc4fd47b88fc0ccfbfb621b966e8d5ceecb1bed7069827f9 Dec 02 00:06:13 crc kubenswrapper[4856]: E1202 00:06:13.790323 4856 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.222:6443: connect: connection refused" interval="800ms" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.001202 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.004573 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.004861 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.004876 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.004903 4856 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 00:06:14 crc kubenswrapper[4856]: E1202 00:06:14.005340 4856 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.222:6443: connect: connection refused" node="crc" Dec 02 00:06:14 crc kubenswrapper[4856]: W1202 00:06:14.170862 4856 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.222:6443: connect: connection refused Dec 02 00:06:14 crc kubenswrapper[4856]: E1202 00:06:14.170951 4856 reflector.go:158] "Unhandled Error" 
err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.222:6443: connect: connection refused" logger="UnhandledError" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.184929 4856 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.222:6443: connect: connection refused Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.187043 4856 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 20:17:17.331772112 +0000 UTC Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.187109 4856 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1004h11m3.144667613s for next certificate rotation Dec 02 00:06:14 crc kubenswrapper[4856]: W1202 00:06:14.211796 4856 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.222:6443: connect: connection refused Dec 02 00:06:14 crc kubenswrapper[4856]: E1202 00:06:14.211911 4856 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.222:6443: connect: connection refused" logger="UnhandledError" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.260011 4856 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8" exitCode=0 Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.260110 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8"} Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.260219 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"e085faf3413ea740fc4fd47b88fc0ccfbfb621b966e8d5ceecb1bed7069827f9"} Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.260412 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.261834 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.261864 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.261873 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.262727 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9"} Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.262788 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8643053b613a6cc86dbdc6a27b8eabeb6dd8abce3b52203896b8383eba89c550"} Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.265739 4856 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e" exitCode=0 Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.265797 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e"} Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.265815 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"ad64921f7c01d9105dd203634a88730f5926401ea37fc6b78d59d874a046568c"} Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.265906 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.266725 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.266775 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.266793 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.267467 4856 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d" exitCode=0 Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.267552 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d"} Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.267577 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5d5abd82c39eeb0d34558ad30a535253ade94720035408eba74e0b95994f2cef"} Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.267702 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.268499 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.268522 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.268533 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.268947 4856 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="bc35a904c17ddb0753b827f46716d62c481e6882d6824fe898ad9a6a4449eabc" exitCode=0 Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.268982 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"bc35a904c17ddb0753b827f46716d62c481e6882d6824fe898ad9a6a4449eabc"} Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.269002 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"2fd24a0145f997ffa9552ee8bc3d7292dc19efec35933e8a1368cd5b5503f567"} Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.269064 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.269777 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.269812 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.269828 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.271252 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.272390 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.272416 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.272428 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:14 crc kubenswrapper[4856]: E1202 00:06:14.591415 4856 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.222:6443: connect: connection refused" interval="1.6s" Dec 02 00:06:14 crc kubenswrapper[4856]: W1202 00:06:14.613024 4856 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.222:6443: connect: connection refused Dec 02 00:06:14 crc kubenswrapper[4856]: E1202 00:06:14.613102 4856 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.222:6443: connect: connection refused" logger="UnhandledError" Dec 02 00:06:14 crc kubenswrapper[4856]: W1202 00:06:14.759922 4856 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get 
"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.222:6443: connect: connection refused Dec 02 00:06:14 crc kubenswrapper[4856]: E1202 00:06:14.760042 4856 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.222:6443: connect: connection refused" logger="UnhandledError" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.805564 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.808040 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.808076 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.808086 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:14 crc kubenswrapper[4856]: I1202 00:06:14.808110 4856 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.273770 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"e416b694bbfa89cc45862a18980da1884ef31b8519981402d99caa02eb99a239"} Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.273838 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"392490e6681da7d19655ef58c640b234085c1ff43d80cea0c78613cfa0c6a7a3"} Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.273866 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"4969137cf60fccb75086b7aa6f66dca51d6db3e3b49deba1b5ecd3f35d967daf"} Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.274005 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.275196 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.275227 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.275239 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.276192 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4"} Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.276215 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201"} Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.276228 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101"} Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.276247 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.277363 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.277406 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.277424 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.279255 4856 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1" exitCode=0 Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.279343 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1"} Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.279482 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.280446 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.280491 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.280508 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.283698 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5"} Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.283736 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99"} Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.283752 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f"} Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.283766 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05"} Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.283778 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08"} Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.283871 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.284865 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.284890 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.284899 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.286157 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"c76478795a82cd5f074d675122e144fd3f16335bc7da88c41620e586a2a7576b"} Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.286220 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.286846 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.286866 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:15 crc kubenswrapper[4856]: I1202 00:06:15.286875 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:16 crc kubenswrapper[4856]: I1202 00:06:16.044285 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:06:16 crc kubenswrapper[4856]: I1202 00:06:16.113218 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 00:06:16 crc kubenswrapper[4856]: I1202 00:06:16.294261 4856 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd" exitCode=0 Dec 02 00:06:16 crc kubenswrapper[4856]: I1202 00:06:16.294329 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd"} Dec 02 00:06:16 crc kubenswrapper[4856]: I1202 00:06:16.294465 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:16 crc kubenswrapper[4856]: I1202 00:06:16.294510 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:16 crc kubenswrapper[4856]: I1202 00:06:16.294612 4856 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:16 crc kubenswrapper[4856]: I1202 00:06:16.294582 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:06:16 crc kubenswrapper[4856]: I1202 00:06:16.295820 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:16 crc kubenswrapper[4856]: I1202 00:06:16.295866 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:16 crc kubenswrapper[4856]: I1202 00:06:16.295894 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:16 crc kubenswrapper[4856]: I1202 00:06:16.295900 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:16 crc kubenswrapper[4856]: I1202 00:06:16.295925 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:16 crc kubenswrapper[4856]: I1202 00:06:16.295932 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:16 crc kubenswrapper[4856]: I1202 00:06:16.296360 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:16 crc kubenswrapper[4856]: I1202 00:06:16.299822 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:16 crc kubenswrapper[4856]: I1202 00:06:16.299883 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:17 crc kubenswrapper[4856]: I1202 00:06:17.300772 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e"} Dec 02 00:06:17 crc kubenswrapper[4856]: I1202 00:06:17.300835 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d"} Dec 02 00:06:17 crc kubenswrapper[4856]: I1202 00:06:17.300900 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:17 crc kubenswrapper[4856]: I1202 00:06:17.300905 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:17 crc kubenswrapper[4856]: I1202 00:06:17.302009 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:17 crc kubenswrapper[4856]: I1202 00:06:17.302043 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:17 crc kubenswrapper[4856]: I1202 00:06:17.302054 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:17 crc kubenswrapper[4856]: I1202 00:06:17.302794 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:17 crc kubenswrapper[4856]: I1202 00:06:17.302833 4856 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:17 crc kubenswrapper[4856]: I1202 00:06:17.302848 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:18 crc kubenswrapper[4856]: I1202 00:06:18.307678 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"33a32067d3d429096016716a4b1341f0c8f17e3fe71dc0cebfb275f6294a2516"} Dec 02 00:06:18 crc kubenswrapper[4856]: I1202 00:06:18.307735 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81"} Dec 02 00:06:18 crc kubenswrapper[4856]: I1202 00:06:18.307753 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4"} Dec 02 00:06:18 crc kubenswrapper[4856]: I1202 00:06:18.307952 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:18 crc kubenswrapper[4856]: I1202 00:06:18.309519 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:18 crc kubenswrapper[4856]: I1202 00:06:18.309624 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:18 crc kubenswrapper[4856]: I1202 00:06:18.309654 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:18 crc kubenswrapper[4856]: I1202 00:06:18.490867 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 02 00:06:19 crc kubenswrapper[4856]: I1202 00:06:19.073960 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 00:06:19 crc kubenswrapper[4856]: I1202 00:06:19.074129 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:19 crc kubenswrapper[4856]: I1202 00:06:19.075553 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:19 crc kubenswrapper[4856]: I1202 00:06:19.075647 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:19 crc kubenswrapper[4856]: I1202 00:06:19.075666 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:19 crc kubenswrapper[4856]: I1202 00:06:19.309934 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:19 crc kubenswrapper[4856]: I1202 00:06:19.311284 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:19 crc kubenswrapper[4856]: I1202 00:06:19.311317 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:19 crc kubenswrapper[4856]: I1202 00:06:19.311329 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:19 crc 
kubenswrapper[4856]: I1202 00:06:19.436322 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 00:06:19 crc kubenswrapper[4856]: I1202 00:06:19.436479 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:19 crc kubenswrapper[4856]: I1202 00:06:19.437913 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:19 crc kubenswrapper[4856]: I1202 00:06:19.437974 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:19 crc kubenswrapper[4856]: I1202 00:06:19.438000 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:19 crc kubenswrapper[4856]: I1202 00:06:19.450621 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:06:19 crc kubenswrapper[4856]: I1202 00:06:19.450757 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:19 crc kubenswrapper[4856]: I1202 00:06:19.451827 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:19 crc kubenswrapper[4856]: I1202 00:06:19.451889 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:19 crc kubenswrapper[4856]: I1202 00:06:19.451912 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:20 crc kubenswrapper[4856]: I1202 00:06:20.313043 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:20 crc kubenswrapper[4856]: I1202 00:06:20.314986 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:20 crc kubenswrapper[4856]: I1202 00:06:20.315046 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:20 crc kubenswrapper[4856]: I1202 00:06:20.315076 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:20 crc kubenswrapper[4856]: I1202 00:06:20.423005 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 00:06:20 crc kubenswrapper[4856]: I1202 00:06:20.423638 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:20 crc kubenswrapper[4856]: I1202 00:06:20.425266 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:20 crc kubenswrapper[4856]: I1202 00:06:20.425329 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:20 crc kubenswrapper[4856]: I1202 00:06:20.425357 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:20 crc kubenswrapper[4856]: I1202 00:06:20.428077 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 00:06:21 crc kubenswrapper[4856]: I1202 
00:06:21.315244 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:21 crc kubenswrapper[4856]: I1202 00:06:21.316570 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:21 crc kubenswrapper[4856]: I1202 00:06:21.316707 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:21 crc kubenswrapper[4856]: I1202 00:06:21.316730 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:22 crc kubenswrapper[4856]: I1202 00:06:22.594099 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 02 00:06:22 crc kubenswrapper[4856]: I1202 00:06:22.594299 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:22 crc kubenswrapper[4856]: I1202 00:06:22.595668 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:22 crc kubenswrapper[4856]: I1202 00:06:22.595732 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:22 crc kubenswrapper[4856]: I1202 00:06:22.595750 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:22 crc kubenswrapper[4856]: I1202 00:06:22.740845 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 00:06:22 crc kubenswrapper[4856]: I1202 00:06:22.741054 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:22 crc kubenswrapper[4856]: I1202 00:06:22.742414 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:22 crc kubenswrapper[4856]: I1202 00:06:22.742466 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:22 crc kubenswrapper[4856]: I1202 00:06:22.742483 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:23 crc kubenswrapper[4856]: E1202 00:06:23.310268 4856 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 02 00:06:24 crc kubenswrapper[4856]: E1202 00:06:24.810086 4856 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc" Dec 02 00:06:24 crc kubenswrapper[4856]: E1202 00:06:24.960460 4856 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": net/http: TLS handshake timeout" event="&Event{ObjectMeta:{crc.187d3d3fca3361ca default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-02 00:06:13.18370145 +0000 UTC m=+0.210069494,LastTimestamp:2025-12-02 00:06:13.18370145 +0000 UTC m=+0.210069494,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 
+0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 02 00:06:25 crc kubenswrapper[4856]: I1202 00:06:25.185171 4856 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 02 00:06:25 crc kubenswrapper[4856]: I1202 00:06:25.231565 4856 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 02 00:06:25 crc kubenswrapper[4856]: I1202 00:06:25.231724 4856 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 02 00:06:25 crc kubenswrapper[4856]: I1202 00:06:25.741451 4856 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 02 00:06:25 crc kubenswrapper[4856]: I1202 00:06:25.741531 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 00:06:26 crc kubenswrapper[4856]: W1202 00:06:26.008040 4856 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 02 00:06:26 crc kubenswrapper[4856]: I1202 00:06:26.008128 4856 trace.go:236] Trace[599958931]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Dec-2025 00:06:16.006) (total time: 10001ms): Dec 02 00:06:26 crc kubenswrapper[4856]: Trace[599958931]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (00:06:26.008) Dec 02 00:06:26 crc kubenswrapper[4856]: Trace[599958931]: [10.001857361s] [10.001857361s] END Dec 02 00:06:26 crc kubenswrapper[4856]: E1202 00:06:26.008150 4856 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 02 00:06:26 crc kubenswrapper[4856]: I1202 00:06:26.044531 4856 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="Get \"https://192.168.126.11:6443/livez\": context deadline exceeded (Client.Timeout exceeded while awaiting 
headers)" start-of-body= Dec 02 00:06:26 crc kubenswrapper[4856]: I1202 00:06:26.044611 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/livez\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 00:06:26 crc kubenswrapper[4856]: E1202 00:06:26.192544 4856 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="3.2s" Dec 02 00:06:26 crc kubenswrapper[4856]: I1202 00:06:26.407667 4856 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 02 00:06:26 crc kubenswrapper[4856]: I1202 00:06:26.407740 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 02 00:06:26 crc kubenswrapper[4856]: I1202 00:06:26.411075 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:26 crc kubenswrapper[4856]: I1202 00:06:26.412142 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:26 crc kubenswrapper[4856]: I1202 00:06:26.412207 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:26 crc kubenswrapper[4856]: I1202 00:06:26.412227 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:26 crc kubenswrapper[4856]: I1202 00:06:26.412265 4856 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 00:06:29 crc kubenswrapper[4856]: I1202 00:06:29.443440 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 00:06:29 crc kubenswrapper[4856]: I1202 00:06:29.443582 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:29 crc kubenswrapper[4856]: I1202 00:06:29.444694 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:29 crc kubenswrapper[4856]: I1202 00:06:29.444819 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:29 crc kubenswrapper[4856]: I1202 00:06:29.444900 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:30 crc kubenswrapper[4856]: I1202 00:06:30.063967 4856 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 02 00:06:31 crc kubenswrapper[4856]: I1202 00:06:31.049060 4856 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:06:31 crc kubenswrapper[4856]: I1202 00:06:31.049194 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:31 crc kubenswrapper[4856]: I1202 00:06:31.050241 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:31 crc kubenswrapper[4856]: I1202 00:06:31.050361 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:31 crc kubenswrapper[4856]: I1202 00:06:31.050447 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:31 crc kubenswrapper[4856]: I1202 00:06:31.053723 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:06:31 crc kubenswrapper[4856]: I1202 00:06:31.337914 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:31 crc kubenswrapper[4856]: I1202 00:06:31.338803 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:31 crc kubenswrapper[4856]: I1202 00:06:31.338846 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:31 crc kubenswrapper[4856]: I1202 00:06:31.338858 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:31 crc kubenswrapper[4856]: I1202 00:06:31.402561 4856 trace.go:236] Trace[802442337]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Dec-2025 00:06:17.922) (total time: 13479ms): Dec 02 00:06:31 crc kubenswrapper[4856]: Trace[802442337]: ---"Objects listed" error: 13479ms (00:06:31.402) Dec 02 00:06:31 crc kubenswrapper[4856]: Trace[802442337]: [13.47961536s] [13.47961536s] END Dec 02 00:06:31 crc kubenswrapper[4856]: I1202 00:06:31.402610 4856 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 02 00:06:31 crc kubenswrapper[4856]: I1202 00:06:31.402745 4856 trace.go:236] Trace[1858899103]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Dec-2025 00:06:17.789) (total time: 13613ms): Dec 02 00:06:31 crc kubenswrapper[4856]: Trace[1858899103]: ---"Objects listed" error: 13613ms (00:06:31.402) Dec 02 00:06:31 crc kubenswrapper[4856]: Trace[1858899103]: [13.613345306s] [13.613345306s] END Dec 02 00:06:31 crc kubenswrapper[4856]: I1202 00:06:31.402771 4856 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 02 00:06:31 crc kubenswrapper[4856]: I1202 00:06:31.403530 4856 trace.go:236] Trace[1652547768]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Dec-2025 00:06:16.508) (total time: 14894ms): Dec 02 00:06:31 crc kubenswrapper[4856]: Trace[1652547768]: ---"Objects listed" error: 14894ms (00:06:31.403) Dec 02 00:06:31 crc kubenswrapper[4856]: Trace[1652547768]: [14.894904987s] [14.894904987s] END Dec 02 00:06:31 crc kubenswrapper[4856]: I1202 00:06:31.403554 4856 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 02 00:06:31 crc kubenswrapper[4856]: I1202 00:06:31.404394 4856 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 
02 00:06:31 crc kubenswrapper[4856]: E1202 00:06:31.419939 4856 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 02 00:06:31 crc kubenswrapper[4856]: I1202 00:06:31.437304 4856 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:44318->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 02 00:06:31 crc kubenswrapper[4856]: I1202 00:06:31.437358 4856 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:44318->192.168.126.11:17697: read: connection reset by peer" Dec 02 00:06:31 crc kubenswrapper[4856]: I1202 00:06:31.437689 4856 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 02 00:06:31 crc kubenswrapper[4856]: I1202 00:06:31.437747 4856 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.191186 4856 apiserver.go:52] "Watching apiserver" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.193946 4856 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.194178 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"] Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.194552 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.194648 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.194832 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.194920 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.195291 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.195354 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.196719 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.196806 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.196895 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.199345 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.199787 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.199817 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.200328 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.200442 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.200530 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.200563 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.200705 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.201504 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.266235 4856 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.281347 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.287828 4856 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.291795 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.302146 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.309884 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.309924 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.309942 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.309958 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.309974 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.309992 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310009 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310023 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310037 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310052 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310067 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310081 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310096 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310111 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310128 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310142 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310155 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310171 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310185 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310199 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310254 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310272 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310287 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310329 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310355 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310350 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310371 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310456 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310489 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310514 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310540 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310564 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310581 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310612 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310641 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310665 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310693 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310721 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310752 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310779 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310809 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310834 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310859 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: 
\"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310881 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310911 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.310947 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312639 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.311145 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312668 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.311197 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312685 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312691 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.311230 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.311258 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312744 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312768 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312786 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312802 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312820 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312837 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312853 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312872 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312889 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312904 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312919 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312934 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312951 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312968 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313007 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313024 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 
00:06:32.313039 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313053 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313069 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313085 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313102 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313116 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313133 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313149 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313164 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313180 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 
00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313194 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313208 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313224 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313241 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313256 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313273 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313290 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313309 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313323 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313340 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod 
\"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313357 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313374 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313389 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313405 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313420 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313436 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313457 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313472 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313487 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313502 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod 
\"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313517 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313531 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313546 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313563 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313579 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313611 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313628 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313646 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313661 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313685 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313699 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313716 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313733 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313748 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313763 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313779 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313797 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313813 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313829 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313846 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313863 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313904 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313922 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313938 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313954 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313973 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313990 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.314009 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.314026 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.314042 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.314062 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.314078 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.314093 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.314110 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.314126 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.314142 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.314159 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.314175 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.311351 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.311408 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.311423 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.311423 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.311433 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.311441 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.311481 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.311522 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.311652 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). 
InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.311688 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.311706 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.314233 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.311720 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.311907 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.314255 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312010 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312087 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312129 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312158 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312165 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312188 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312192 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312227 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312360 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312411 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312444 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312548 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312637 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312683 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312724 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312909 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.312908 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313117 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313288 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313375 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313537 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313581 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313601 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313820 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313854 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313898 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313944 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.313968 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.314016 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.314092 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.314145 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.314177 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.314209 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.314448 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.311979 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.314453 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.314510 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.314741 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.314986 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.315038 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.315091 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316316 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.314193 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316424 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316448 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316470 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316486 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316501 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: 
\"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316518 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316536 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316553 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316559 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316570 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316580 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316605 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316602 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316700 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316726 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316746 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316766 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316785 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316801 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316824 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316840 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316859 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: 
\"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316876 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316893 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316910 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316999 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317018 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317035 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317051 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317068 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317085 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317101 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: 
\"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317120 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317136 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317155 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317170 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317187 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317202 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317220 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317240 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317260 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317277 4856 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317299 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317323 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317338 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317355 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317370 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317386 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317405 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317399 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317421 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317553 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317801 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317823 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317849 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317883 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: 
\"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317906 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317939 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317962 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317985 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318001 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318016 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318033 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318049 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318074 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318107 4856 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318133 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318158 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318183 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318227 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318254 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318272 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318291 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318310 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 
00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318348 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318365 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318385 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318406 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318424 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318442 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318464 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318484 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318504 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: 
\"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318557 4856 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318607 4856 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318622 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318634 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318644 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318653 4856 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318662 4856 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318671 4856 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318681 4856 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318691 4856 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318701 4856 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318711 4856 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318721 
4856 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318759 4856 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318769 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318778 4856 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318788 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318796 4856 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318806 4856 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318815 4856 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318825 4856 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318835 4856 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318844 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318853 4856 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318863 4856 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 
00:06:32.318872 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318881 4856 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318891 4856 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318901 4856 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318910 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318920 4856 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318929 4856 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318938 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318947 4856 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318958 4856 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318966 4856 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318976 4856 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318985 4856 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318993 4856 reconciler_common.go:293] "Volume 
detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319003 4856 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319012 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319021 4856 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319035 4856 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319045 4856 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319054 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319063 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319072 4856 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319081 4856 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319091 4856 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319100 4856 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319110 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: 
I1202 00:06:32.319118 4856 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319128 4856 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319137 4856 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319146 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319154 4856 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319163 4856 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319173 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319182 4856 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319192 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319201 4856 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319210 4856 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319220 4856 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319229 4856 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319239 4856 reconciler_common.go:293] "Volume detached for volume 
\"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319256 4856 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316856 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.323829 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316949 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.316974 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317274 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317571 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.317720 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318060 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318140 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318162 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318264 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318370 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318425 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318451 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318485 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318815 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318837 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.324034 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318840 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.318928 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319828 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319889 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.319996 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.320129 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.320155 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.320223 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.320229 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.320259 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.320408 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.320692 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.320821 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.320835 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.320866 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:06:32.82085025 +0000 UTC m=+19.847218244 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.320929 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.321022 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.321570 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.321654 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.321882 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.321994 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.322116 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.322255 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.322413 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.322475 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.322490 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.322491 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.322510 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.322511 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.322795 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.323043 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.323067 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.323278 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). 
InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.323512 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.323519 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.323552 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.323565 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.323662 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.323795 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.323804 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.324389 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.324398 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.324405 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.324392 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.324646 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.324816 4856 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.324818 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.324867 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 00:06:32.824854376 +0000 UTC m=+19.851222450 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.324979 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.325256 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.325421 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.325512 4856 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.325562 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 00:06:32.825549045 +0000 UTC m=+19.851917139 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.325607 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.325671 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.325757 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.325786 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.325985 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.325976 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.326067 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.326144 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.326350 4856 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.324826 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.326502 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.326693 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.326867 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.324680 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.327016 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.327097 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.327236 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.327296 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.327305 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.327303 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.327675 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.327793 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.327816 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.328003 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.329729 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.329950 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.331607 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.332159 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.332926 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.332962 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.333076 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.337217 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.339956 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.340160 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.340633 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.341166 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.344235 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.344996 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.345132 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.345941 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.346391 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.346865 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.346987 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.347692 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.348576 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.348619 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.348706 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.348822 4856 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.348888 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.348910 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 00:06:32.848873635 +0000 UTC m=+19.875241639 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.349208 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.349672 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.349786 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.349850 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.349979 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.350417 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.350438 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.350457 4856 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.350503 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 00:06:32.850483408 +0000 UTC m=+19.876851412 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.351027 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.352224 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.352417 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.352827 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.353029 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.353422 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.353581 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.353620 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.354210 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.354301 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.354430 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.354577 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.355297 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.355832 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.356000 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.357966 4856 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5" exitCode=255 Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.358019 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5"} Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.363279 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.368575 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.370667 4856 scope.go:117] "RemoveContainer" containerID="4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.372095 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.374043 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.382695 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.386791 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.388841 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.390040 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.404360 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.413920 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.420786 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421085 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421136 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421168 4856 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421177 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421186 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421195 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 
00:06:32.421207 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421216 4856 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421215 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421241 4856 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421287 4856 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421298 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421308 4856 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421318 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421327 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421336 4856 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421345 4856 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421353 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421362 4856 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421370 4856 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421378 4856 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421387 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421396 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421405 4856 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421413 4856 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421421 4856 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421429 4856 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421421 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421437 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421508 4856 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421521 4856 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421537 4856 reconciler_common.go:293] "Volume 
detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421546 4856 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421555 4856 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421566 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421575 4856 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421584 4856 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421608 4856 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421618 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421628 4856 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421636 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421646 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421656 4856 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421665 4856 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node 
\"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421675 4856 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421684 4856 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421693 4856 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421703 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421712 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421721 4856 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421731 4856 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421739 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421747 4856 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421757 4856 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421764 4856 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421772 4856 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421780 4856 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421788 4856 
reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421797 4856 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421807 4856 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421824 4856 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421832 4856 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421840 4856 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421849 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421858 4856 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421865 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421874 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421882 4856 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421891 4856 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421899 4856 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc 
kubenswrapper[4856]: I1202 00:06:32.421910 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421917 4856 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421925 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421933 4856 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421941 4856 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421949 4856 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421957 4856 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421964 4856 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.421972 4856 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422067 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422078 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422088 4856 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422096 4856 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc 
kubenswrapper[4856]: I1202 00:06:32.422104 4856 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422111 4856 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422120 4856 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422127 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422138 4856 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422146 4856 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422156 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422165 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422174 4856 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422185 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422194 4856 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422205 4856 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422215 4856 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc 
kubenswrapper[4856]: I1202 00:06:32.422224 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422235 4856 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422246 4856 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422255 4856 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422264 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422271 4856 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422280 4856 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422288 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422296 4856 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422303 4856 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422311 4856 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422320 4856 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422329 4856 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: 
I1202 00:06:32.422336 4856 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422344 4856 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422352 4856 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422360 4856 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422369 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422377 4856 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422385 4856 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422392 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422401 4856 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422408 4856 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422417 4856 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422428 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422437 4856 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc 
kubenswrapper[4856]: I1202 00:06:32.422445 4856 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422453 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422461 4856 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422493 4856 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422502 4856 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422510 4856 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422518 4856 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422526 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.422534 4856 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.423842 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.434459 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.509729 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.518244 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.525727 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 00:06:32 crc kubenswrapper[4856]: W1202 00:06:32.555687 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-3bbdd0b766d3943e9ffdc04bfafc7b5700cb0603dbd5aa0f6385bcc2f673792b WatchSource:0}: Error finding container 3bbdd0b766d3943e9ffdc04bfafc7b5700cb0603dbd5aa0f6385bcc2f673792b: Status 404 returned error can't find the container with id 3bbdd0b766d3943e9ffdc04bfafc7b5700cb0603dbd5aa0f6385bcc2f673792b Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.640796 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.659809 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.662546 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.745306 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.751232 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.764849 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.771143 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.779124 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02
T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.816841 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.824931 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.825053 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.825180 4856 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.825234 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 00:06:33.825218088 +0000 UTC m=+20.851586092 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.825302 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:06:33.82529366 +0000 UTC m=+20.851661664 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.865545 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.873037 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.889054 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.922152 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.925848 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.925898 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.925923 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.926063 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.926082 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.926095 4856 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.926144 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 00:06:33.926127941 +0000 UTC m=+20.952495945 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.926204 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.926220 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.926229 4856 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.926256 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 00:06:33.926247894 +0000 UTC m=+20.952615898 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.926306 4856 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 00:06:32 crc kubenswrapper[4856]: E1202 00:06:32.926336 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 00:06:33.926327456 +0000 UTC m=+20.952695460 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.949170 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f
36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.973796 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3
fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:32 crc kubenswrapper[4856]: I1202 00:06:32.991439 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.006494 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.016495 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.028056 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.037690 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-po
d-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.048203 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.059268 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.071011 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-rl6j8"] Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.071393 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-rl6j8" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.073226 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.073564 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.075037 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.088018 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02
T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.103642 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3
fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.113641 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.127338 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/c5969950-38f4-420c-8824-e9164238cacf-hosts-file\") pod \"node-resolver-rl6j8\" (UID: \"c5969950-38f4-420c-8824-e9164238cacf\") " pod="openshift-dns/node-resolver-rl6j8" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.127406 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzv4w\" (UniqueName: \"kubernetes.io/projected/c5969950-38f4-420c-8824-e9164238cacf-kube-api-access-kzv4w\") pod \"node-resolver-rl6j8\" (UID: \"c5969950-38f4-420c-8824-e9164238cacf\") " pod="openshift-dns/node-resolver-rl6j8" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.128705 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.139531 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.154143 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.170676 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.182011 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.192551 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.200936 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.228935 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/c5969950-38f4-420c-8824-e9164238cacf-hosts-file\") pod \"node-resolver-rl6j8\" (UID: \"c5969950-38f4-420c-8824-e9164238cacf\") " pod="openshift-dns/node-resolver-rl6j8" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.228989 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzv4w\" (UniqueName: \"kubernetes.io/projected/c5969950-38f4-420c-8824-e9164238cacf-kube-api-access-kzv4w\") pod \"node-resolver-rl6j8\" (UID: \"c5969950-38f4-420c-8824-e9164238cacf\") " pod="openshift-dns/node-resolver-rl6j8" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.229321 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/c5969950-38f4-420c-8824-e9164238cacf-hosts-file\") pod \"node-resolver-rl6j8\" (UID: \"c5969950-38f4-420c-8824-e9164238cacf\") " pod="openshift-dns/node-resolver-rl6j8" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.247723 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzv4w\" (UniqueName: \"kubernetes.io/projected/c5969950-38f4-420c-8824-e9164238cacf-kube-api-access-kzv4w\") pod \"node-resolver-rl6j8\" (UID: \"c5969950-38f4-420c-8824-e9164238cacf\") " pod="openshift-dns/node-resolver-rl6j8" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.256360 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.257218 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.258334 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.259305 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.260379 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.261434 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.262243 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.263120 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.264198 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.265095 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.267530 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.268465 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.269880 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.271331 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.272175 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.272959 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.274186 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.275027 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.275945 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.276570 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.277248 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.278346 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.279054 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.279580 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.280959 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.281371 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.282417 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.282796 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02
T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.283101 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.283980 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.284718 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.285756 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 
00:06:33.286246 4856 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.286399 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.289084 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.289674 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.290179 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.292329 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.293861 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.294524 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.295901 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.297377 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.298499 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.299268 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.300455 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.301850 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 
00:06:33.304302 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.304872 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\
\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.305341 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.306228 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.307471 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.308392 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.308872 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.309331 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.311749 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.312365 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.313269 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.320556 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.332832 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.335207 4856 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.347102 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.362604 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.365655 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927"} Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.366142 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.366412 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.366736 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"3bbdd0b766d3943e9ffdc04bfafc7b5700cb0603dbd5aa0f6385bcc2f673792b"} Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.369113 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526"} Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.369138 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8"} Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.369147 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"fedb20064dc08c1c835bfe8bdb199cb15cf16e8b5eb078217811d74f1c33a811"} Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.371086 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d"} Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.371117 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"99d74a993eef4c042995348d1ee77805ea7b2c087347f5becfbfc6d67e7e1749"} Dec 02 00:06:33 crc kubenswrapper[4856]: E1202 00:06:33.376517 4856 kubelet.go:1929] "Failed creating a mirror pod for" err="pods 
\"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.378209 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubern
etes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: E1202 00:06:33.378404 4856 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"etcd-crc\" already exists" pod="openshift-etcd/etcd-crc" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.384079 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-rl6j8" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.395055 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: W1202 00:06:33.398957 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc5969950_38f4_420c_8824_e9164238cacf.slice/crio-57755e1790b4538d5503304b0ad5754e486c35092fba48bf41045d493e1584b4 WatchSource:0}: Error finding container 57755e1790b4538d5503304b0ad5754e486c35092fba48bf41045d493e1584b4: Status 404 returned error can't find the container with id 57755e1790b4538d5503304b0ad5754e486c35092fba48bf41045d493e1584b4 Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.409314 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.428395 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.444326 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.464625 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.473107 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-qlft7"] Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.473556 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-455ww"] Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.473786 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-455ww" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.474035 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-qlft7" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.475146 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-5mfwj"] Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.475309 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.475666 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.479289 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.479341 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.479357 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.479545 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.479649 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.479842 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.480083 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.480229 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.480290 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.480657 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.480413 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.480471 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.519661 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3
fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531066 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-multus-conf-dir\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531181 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/536def47-c9d3-4c3e-9b4a-3776e034998b-cni-binary-copy\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531219 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-host-var-lib-cni-bin\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531241 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0271f00d-b420-4dee-aa8b-92d6fc294b2a-mcd-auth-proxy-config\") pod \"machine-config-daemon-455ww\" (UID: \"0271f00d-b420-4dee-aa8b-92d6fc294b2a\") " pod="openshift-machine-config-operator/machine-config-daemon-455ww" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531260 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/1fa9541c-8427-40d2-aa5d-b53cb430bddd-tuning-conf-dir\") pod \"multus-additional-cni-plugins-qlft7\" (UID: \"1fa9541c-8427-40d2-aa5d-b53cb430bddd\") " 
pod="openshift-multus/multus-additional-cni-plugins-qlft7" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531292 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-host-run-netns\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531308 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-host-run-multus-certs\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531331 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/1fa9541c-8427-40d2-aa5d-b53cb430bddd-system-cni-dir\") pod \"multus-additional-cni-plugins-qlft7\" (UID: \"1fa9541c-8427-40d2-aa5d-b53cb430bddd\") " pod="openshift-multus/multus-additional-cni-plugins-qlft7" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531365 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/1fa9541c-8427-40d2-aa5d-b53cb430bddd-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-qlft7\" (UID: \"1fa9541c-8427-40d2-aa5d-b53cb430bddd\") " pod="openshift-multus/multus-additional-cni-plugins-qlft7" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531387 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-os-release\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531402 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-host-run-k8s-cni-cncf-io\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531434 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-host-var-lib-kubelet\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531448 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/536def47-c9d3-4c3e-9b4a-3776e034998b-multus-daemon-config\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531556 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/1fa9541c-8427-40d2-aa5d-b53cb430bddd-os-release\") pod 
\"multus-additional-cni-plugins-qlft7\" (UID: \"1fa9541c-8427-40d2-aa5d-b53cb430bddd\") " pod="openshift-multus/multus-additional-cni-plugins-qlft7" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531614 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-system-cni-dir\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531632 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/1fa9541c-8427-40d2-aa5d-b53cb430bddd-cnibin\") pod \"multus-additional-cni-plugins-qlft7\" (UID: \"1fa9541c-8427-40d2-aa5d-b53cb430bddd\") " pod="openshift-multus/multus-additional-cni-plugins-qlft7" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531647 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-host-var-lib-cni-multus\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531661 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zj72w\" (UniqueName: \"kubernetes.io/projected/536def47-c9d3-4c3e-9b4a-3776e034998b-kube-api-access-zj72w\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531678 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-cnibin\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531691 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-etc-kubernetes\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531737 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/1fa9541c-8427-40d2-aa5d-b53cb430bddd-cni-binary-copy\") pod \"multus-additional-cni-plugins-qlft7\" (UID: \"1fa9541c-8427-40d2-aa5d-b53cb430bddd\") " pod="openshift-multus/multus-additional-cni-plugins-qlft7" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531764 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6p6v6\" (UniqueName: \"kubernetes.io/projected/1fa9541c-8427-40d2-aa5d-b53cb430bddd-kube-api-access-6p6v6\") pod \"multus-additional-cni-plugins-qlft7\" (UID: \"1fa9541c-8427-40d2-aa5d-b53cb430bddd\") " pod="openshift-multus/multus-additional-cni-plugins-qlft7" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531783 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-multus-socket-dir-parent\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531801 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0271f00d-b420-4dee-aa8b-92d6fc294b2a-proxy-tls\") pod \"machine-config-daemon-455ww\" (UID: \"0271f00d-b420-4dee-aa8b-92d6fc294b2a\") " pod="openshift-machine-config-operator/machine-config-daemon-455ww" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531824 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4brt\" (UniqueName: \"kubernetes.io/projected/0271f00d-b420-4dee-aa8b-92d6fc294b2a-kube-api-access-p4brt\") pod \"machine-config-daemon-455ww\" (UID: \"0271f00d-b420-4dee-aa8b-92d6fc294b2a\") " pod="openshift-machine-config-operator/machine-config-daemon-455ww" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531861 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-multus-cni-dir\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531881 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-hostroot\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.531919 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/0271f00d-b420-4dee-aa8b-92d6fc294b2a-rootfs\") pod \"machine-config-daemon-455ww\" (UID: \"0271f00d-b420-4dee-aa8b-92d6fc294b2a\") " pod="openshift-machine-config-operator/machine-config-daemon-455ww" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.532416 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.546843 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.572273 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.594047 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.607115 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.620019 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.629973 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.632844 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-multus-socket-dir-parent\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.632871 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/1fa9541c-8427-40d2-aa5d-b53cb430bddd-cni-binary-copy\") pod \"multus-additional-cni-plugins-qlft7\" (UID: \"1fa9541c-8427-40d2-aa5d-b53cb430bddd\") " pod="openshift-multus/multus-additional-cni-plugins-qlft7" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.632897 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6p6v6\" (UniqueName: \"kubernetes.io/projected/1fa9541c-8427-40d2-aa5d-b53cb430bddd-kube-api-access-6p6v6\") pod \"multus-additional-cni-plugins-qlft7\" (UID: \"1fa9541c-8427-40d2-aa5d-b53cb430bddd\") " pod="openshift-multus/multus-additional-cni-plugins-qlft7" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.632915 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0271f00d-b420-4dee-aa8b-92d6fc294b2a-proxy-tls\") pod \"machine-config-daemon-455ww\" (UID: \"0271f00d-b420-4dee-aa8b-92d6fc294b2a\") " pod="openshift-machine-config-operator/machine-config-daemon-455ww" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.632931 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4brt\" (UniqueName: \"kubernetes.io/projected/0271f00d-b420-4dee-aa8b-92d6fc294b2a-kube-api-access-p4brt\") pod \"machine-config-daemon-455ww\" (UID: \"0271f00d-b420-4dee-aa8b-92d6fc294b2a\") " pod="openshift-machine-config-operator/machine-config-daemon-455ww" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.632953 4856 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-multus-cni-dir\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.632949 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-multus-socket-dir-parent\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.632971 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-hostroot\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.633017 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-hostroot\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.633172 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-multus-cni-dir\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.633253 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/0271f00d-b420-4dee-aa8b-92d6fc294b2a-rootfs\") pod \"machine-config-daemon-455ww\" (UID: \"0271f00d-b420-4dee-aa8b-92d6fc294b2a\") " pod="openshift-machine-config-operator/machine-config-daemon-455ww" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.633294 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-multus-conf-dir\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.633314 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/536def47-c9d3-4c3e-9b4a-3776e034998b-cni-binary-copy\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.633407 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/0271f00d-b420-4dee-aa8b-92d6fc294b2a-rootfs\") pod \"machine-config-daemon-455ww\" (UID: \"0271f00d-b420-4dee-aa8b-92d6fc294b2a\") " pod="openshift-machine-config-operator/machine-config-daemon-455ww" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.633430 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-multus-conf-dir\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " 
pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.633504 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-host-var-lib-cni-bin\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.633901 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/536def47-c9d3-4c3e-9b4a-3776e034998b-cni-binary-copy\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.633940 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-host-var-lib-cni-bin\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.633965 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0271f00d-b420-4dee-aa8b-92d6fc294b2a-mcd-auth-proxy-config\") pod \"machine-config-daemon-455ww\" (UID: \"0271f00d-b420-4dee-aa8b-92d6fc294b2a\") " pod="openshift-machine-config-operator/machine-config-daemon-455ww" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.633980 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/1fa9541c-8427-40d2-aa5d-b53cb430bddd-tuning-conf-dir\") pod \"multus-additional-cni-plugins-qlft7\" (UID: \"1fa9541c-8427-40d2-aa5d-b53cb430bddd\") " pod="openshift-multus/multus-additional-cni-plugins-qlft7" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.633994 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-host-run-netns\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634005 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/1fa9541c-8427-40d2-aa5d-b53cb430bddd-cni-binary-copy\") pod \"multus-additional-cni-plugins-qlft7\" (UID: \"1fa9541c-8427-40d2-aa5d-b53cb430bddd\") " pod="openshift-multus/multus-additional-cni-plugins-qlft7" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634009 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-host-run-multus-certs\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634032 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-host-run-multus-certs\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634044 
4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/1fa9541c-8427-40d2-aa5d-b53cb430bddd-system-cni-dir\") pod \"multus-additional-cni-plugins-qlft7\" (UID: \"1fa9541c-8427-40d2-aa5d-b53cb430bddd\") " pod="openshift-multus/multus-additional-cni-plugins-qlft7" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634060 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/1fa9541c-8427-40d2-aa5d-b53cb430bddd-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-qlft7\" (UID: \"1fa9541c-8427-40d2-aa5d-b53cb430bddd\") " pod="openshift-multus/multus-additional-cni-plugins-qlft7" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634076 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-os-release\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634092 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-host-run-k8s-cni-cncf-io\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634109 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-host-var-lib-kubelet\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634139 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/1fa9541c-8427-40d2-aa5d-b53cb430bddd-os-release\") pod \"multus-additional-cni-plugins-qlft7\" (UID: \"1fa9541c-8427-40d2-aa5d-b53cb430bddd\") " pod="openshift-multus/multus-additional-cni-plugins-qlft7" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634153 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-system-cni-dir\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634167 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/536def47-c9d3-4c3e-9b4a-3776e034998b-multus-daemon-config\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634189 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zj72w\" (UniqueName: \"kubernetes.io/projected/536def47-c9d3-4c3e-9b4a-3776e034998b-kube-api-access-zj72w\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634203 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"cnibin\" (UniqueName: \"kubernetes.io/host-path/1fa9541c-8427-40d2-aa5d-b53cb430bddd-cnibin\") pod \"multus-additional-cni-plugins-qlft7\" (UID: \"1fa9541c-8427-40d2-aa5d-b53cb430bddd\") " pod="openshift-multus/multus-additional-cni-plugins-qlft7" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634219 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-host-var-lib-cni-multus\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634232 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-cnibin\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634251 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-etc-kubernetes\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634289 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-etc-kubernetes\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634311 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/1fa9541c-8427-40d2-aa5d-b53cb430bddd-system-cni-dir\") pod \"multus-additional-cni-plugins-qlft7\" (UID: \"1fa9541c-8427-40d2-aa5d-b53cb430bddd\") " pod="openshift-multus/multus-additional-cni-plugins-qlft7" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634476 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0271f00d-b420-4dee-aa8b-92d6fc294b2a-mcd-auth-proxy-config\") pod \"machine-config-daemon-455ww\" (UID: \"0271f00d-b420-4dee-aa8b-92d6fc294b2a\") " pod="openshift-machine-config-operator/machine-config-daemon-455ww" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634730 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/1fa9541c-8427-40d2-aa5d-b53cb430bddd-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-qlft7\" (UID: \"1fa9541c-8427-40d2-aa5d-b53cb430bddd\") " pod="openshift-multus/multus-additional-cni-plugins-qlft7" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634773 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-system-cni-dir\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634795 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/1fa9541c-8427-40d2-aa5d-b53cb430bddd-cnibin\") pod 
\"multus-additional-cni-plugins-qlft7\" (UID: \"1fa9541c-8427-40d2-aa5d-b53cb430bddd\") " pod="openshift-multus/multus-additional-cni-plugins-qlft7" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634807 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-host-var-lib-cni-multus\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634841 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-host-var-lib-kubelet\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634871 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-host-run-k8s-cni-cncf-io\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634878 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-os-release\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634908 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-cnibin\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634929 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/1fa9541c-8427-40d2-aa5d-b53cb430bddd-os-release\") pod \"multus-additional-cni-plugins-qlft7\" (UID: \"1fa9541c-8427-40d2-aa5d-b53cb430bddd\") " pod="openshift-multus/multus-additional-cni-plugins-qlft7" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634942 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/536def47-c9d3-4c3e-9b4a-3776e034998b-host-run-netns\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.634996 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/1fa9541c-8427-40d2-aa5d-b53cb430bddd-tuning-conf-dir\") pod \"multus-additional-cni-plugins-qlft7\" (UID: \"1fa9541c-8427-40d2-aa5d-b53cb430bddd\") " pod="openshift-multus/multus-additional-cni-plugins-qlft7" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.635359 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/536def47-c9d3-4c3e-9b4a-3776e034998b-multus-daemon-config\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.641036 4856 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0271f00d-b420-4dee-aa8b-92d6fc294b2a-proxy-tls\") pod \"machine-config-daemon-455ww\" (UID: \"0271f00d-b420-4dee-aa8b-92d6fc294b2a\") " pod="openshift-machine-config-operator/machine-config-daemon-455ww" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.647865 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4brt\" (UniqueName: \"kubernetes.io/projected/0271f00d-b420-4dee-aa8b-92d6fc294b2a-kube-api-access-p4brt\") pod \"machine-config-daemon-455ww\" (UID: \"0271f00d-b420-4dee-aa8b-92d6fc294b2a\") " pod="openshift-machine-config-operator/machine-config-daemon-455ww" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.649787 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6p6v6\" (UniqueName: \"kubernetes.io/projected/1fa9541c-8427-40d2-aa5d-b53cb430bddd-kube-api-access-6p6v6\") pod \"multus-additional-cni-plugins-qlft7\" (UID: \"1fa9541c-8427-40d2-aa5d-b53cb430bddd\") " pod="openshift-multus/multus-additional-cni-plugins-qlft7" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.652137 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a673147
31ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"re
ady\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.655002 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zj72w\" (UniqueName: \"kubernetes.io/projected/536def47-c9d3-4c3e-9b4a-3776e034998b-kube-api-access-zj72w\") pod \"multus-5mfwj\" (UID: \"536def47-c9d3-4c3e-9b4a-3776e034998b\") " pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.668477 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.689127 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.713989 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.734101 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.770110 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.793179 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-455ww" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.797764 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.800531 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-qlft7" Dec 02 00:06:33 crc kubenswrapper[4856]: W1202 00:06:33.804654 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0271f00d_b420_4dee_aa8b_92d6fc294b2a.slice/crio-c40e66ccf57ab2847e91f4afc3e5d1d44a02b8847f87b941fd18534bc75811c2 WatchSource:0}: Error finding container c40e66ccf57ab2847e91f4afc3e5d1d44a02b8847f87b941fd18534bc75811c2: Status 404 returned error can't find the container with id c40e66ccf57ab2847e91f4afc3e5d1d44a02b8847f87b941fd18534bc75811c2 Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.811514 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-5mfwj" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.817509 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\"
:false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID
\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.830686 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: W1202 00:06:33.833886 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod536def47_c9d3_4c3e_9b4a_3776e034998b.slice/crio-8a869b74935b04595431c96c206573a2a96cfa98523235c31d64570c7f4d796a WatchSource:0}: Error finding container 8a869b74935b04595431c96c206573a2a96cfa98523235c31d64570c7f4d796a: Status 404 returned error can't find the container with id 8a869b74935b04595431c96c206573a2a96cfa98523235c31d64570c7f4d796a Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.835128 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:06:33 crc kubenswrapper[4856]: E1202 00:06:33.835231 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:06:35.835214049 +0000 UTC m=+22.861582053 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.835316 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:06:33 crc kubenswrapper[4856]: E1202 00:06:33.835408 4856 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 00:06:33 crc kubenswrapper[4856]: E1202 00:06:33.835445 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 00:06:35.835439305 +0000 UTC m=+22.861807309 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.869170 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117
ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.881259 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-l5jg6"] Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.882041 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.887019 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.893135 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.894672 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.914319 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.932517 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.936343 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-run-ovn\") pod \"ovnkube-node-l5jg6\" (UID: 
\"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.936384 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-slash\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.936408 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-run-netns\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.936435 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-run-systemd\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.936465 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.936497 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.936522 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-cni-netd\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.936545 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-var-lib-openvswitch\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.936565 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-etc-openvswitch\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.936603 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"node-log\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-node-log\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.936629 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3051381c-49c8-4217-9831-013ca2931604-ovnkube-config\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:33 crc kubenswrapper[4856]: E1202 00:06:33.936629 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 00:06:33 crc kubenswrapper[4856]: E1202 00:06:33.936655 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.936662 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3051381c-49c8-4217-9831-013ca2931604-env-overrides\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.936690 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-run-ovn-kubernetes\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.936712 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-cni-bin\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.936735 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:33 crc kubenswrapper[4856]: E1202 00:06:33.936668 4856 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.936768 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:06:33 crc kubenswrapper[4856]: E1202 00:06:33.936793 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 00:06:35.936775749 +0000 UTC m=+22.963143763 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.936811 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-kubelet\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.936834 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2g5ht\" (UniqueName: \"kubernetes.io/projected/3051381c-49c8-4217-9831-013ca2931604-kube-api-access-2g5ht\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:33 crc kubenswrapper[4856]: E1202 00:06:33.936840 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 00:06:33 crc kubenswrapper[4856]: E1202 00:06:33.936858 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 00:06:33 crc kubenswrapper[4856]: E1202 00:06:33.936869 4856 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.936866 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3051381c-49c8-4217-9831-013ca2931604-ovn-node-metrics-cert\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:33 crc kubenswrapper[4856]: E1202 00:06:33.936719 4856 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 00:06:33 crc kubenswrapper[4856]: E1202 00:06:33.936900 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-12-02 00:06:35.936890262 +0000 UTC m=+22.963258266 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.936919 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-log-socket\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:33 crc kubenswrapper[4856]: E1202 00:06:33.936931 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 00:06:35.936922363 +0000 UTC m=+22.963290367 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.936952 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-run-openvswitch\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.937016 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3051381c-49c8-4217-9831-013ca2931604-ovnkube-script-lib\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.937058 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-systemd-units\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.952829 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.972531 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 02 00:06:33 crc kubenswrapper[4856]: I1202 00:06:33.993373 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.037738 4856 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-cni-bin\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.037781 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.037805 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-run-ovn-kubernetes\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.037837 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-kubelet\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.037856 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2g5ht\" (UniqueName: \"kubernetes.io/projected/3051381c-49c8-4217-9831-013ca2931604-kube-api-access-2g5ht\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.037878 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3051381c-49c8-4217-9831-013ca2931604-ovn-node-metrics-cert\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.037890 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-cni-bin\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.037906 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.037901 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-log-socket\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.037920 4856 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-kubelet\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.037910 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-run-ovn-kubernetes\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.037936 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-log-socket\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.038079 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-run-openvswitch\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.038115 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3051381c-49c8-4217-9831-013ca2931604-ovnkube-script-lib\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.038139 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-systemd-units\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.038159 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-slash\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.038159 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-run-openvswitch\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.038179 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-run-netns\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.038199 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: 
\"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-run-systemd\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.038207 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-systemd-units\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.038217 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-run-ovn\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.038238 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-run-netns\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.038260 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-slash\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.038279 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-var-lib-openvswitch\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.038304 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-run-ovn\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.038303 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-etc-openvswitch\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.038335 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-cni-netd\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.038337 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-etc-openvswitch\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.038354 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-node-log\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.038375 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3051381c-49c8-4217-9831-013ca2931604-ovnkube-config\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.038374 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-var-lib-openvswitch\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.038284 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-run-systemd\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.038391 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3051381c-49c8-4217-9831-013ca2931604-env-overrides\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.038456 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-node-log\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.038492 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-cni-netd\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.038910 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3051381c-49c8-4217-9831-013ca2931604-env-overrides\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.038993 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3051381c-49c8-4217-9831-013ca2931604-ovnkube-script-lib\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.039203 4856 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3051381c-49c8-4217-9831-013ca2931604-ovnkube-config\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.040760 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3051381c-49c8-4217-9831-013ca2931604-ovn-node-metrics-cert\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.042249 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.067738 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2g5ht\" (UniqueName: \"kubernetes.io/projected/3051381c-49c8-4217-9831-013ca2931604-kube-api-access-2g5ht\") pod \"ovnkube-node-l5jg6\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.101323 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.149240 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.188944 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.218072 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.223368 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: W1202 00:06:34.229812 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3051381c_49c8_4217_9831_013ca2931604.slice/crio-df203f6a4c9ca83756f8b45f6facc8ebf3e7a21853cc51d6d4de07eb99a911ed WatchSource:0}: Error finding container df203f6a4c9ca83756f8b45f6facc8ebf3e7a21853cc51d6d4de07eb99a911ed: Status 404 returned error can't find the container with id df203f6a4c9ca83756f8b45f6facc8ebf3e7a21853cc51d6d4de07eb99a911ed Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.251920 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.251984 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:06:34 crc kubenswrapper[4856]: E1202 00:06:34.252040 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.252123 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:06:34 crc kubenswrapper[4856]: E1202 00:06:34.252286 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:06:34 crc kubenswrapper[4856]: E1202 00:06:34.252445 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.270061 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.304368 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.350098 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts
\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host
-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.376387 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerStarted","Data":"df203f6a4c9ca83756f8b45f6facc8ebf3e7a21853cc51d6d4de07eb99a911ed"} Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.378432 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-rl6j8" event={"ID":"c5969950-38f4-420c-8824-e9164238cacf","Type":"ContainerStarted","Data":"06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2"} Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.378497 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-rl6j8" event={"ID":"c5969950-38f4-420c-8824-e9164238cacf","Type":"ContainerStarted","Data":"57755e1790b4538d5503304b0ad5754e486c35092fba48bf41045d493e1584b4"} Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.381528 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" event={"ID":"0271f00d-b420-4dee-aa8b-92d6fc294b2a","Type":"ContainerStarted","Data":"540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a"} Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.381888 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" event={"ID":"0271f00d-b420-4dee-aa8b-92d6fc294b2a","Type":"ContainerStarted","Data":"f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82"} Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.381903 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" event={"ID":"0271f00d-b420-4dee-aa8b-92d6fc294b2a","Type":"ContainerStarted","Data":"c40e66ccf57ab2847e91f4afc3e5d1d44a02b8847f87b941fd18534bc75811c2"} Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.386749 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5mfwj" event={"ID":"536def47-c9d3-4c3e-9b4a-3776e034998b","Type":"ContainerStarted","Data":"def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129"} Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.386784 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5mfwj" event={"ID":"536def47-c9d3-4c3e-9b4a-3776e034998b","Type":"ContainerStarted","Data":"8a869b74935b04595431c96c206573a2a96cfa98523235c31d64570c7f4d796a"} Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.390866 4856 generic.go:334] "Generic (PLEG): container finished" podID="1fa9541c-8427-40d2-aa5d-b53cb430bddd" containerID="b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9" exitCode=0 Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.390926 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" event={"ID":"1fa9541c-8427-40d2-aa5d-b53cb430bddd","Type":"ContainerDied","Data":"b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9"} Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.390968 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" event={"ID":"1fa9541c-8427-40d2-aa5d-b53cb430bddd","Type":"ContainerStarted","Data":"48747b76ba22a28ab12ed292d3e1b6f57857df76b8aabbf6339fb465dbb8d1e9"} Dec 02 
00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.392221 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.423761 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.462187 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.528464 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.546548 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.588765 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3
fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.620918 4856 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.621346 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.622693 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.622746 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.622758 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.622863 4856 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.683733 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.698062 4856 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.698434 4856 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.701511 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.701582 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.701615 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.701641 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.701657 4856 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:34Z","lastTransitionTime":"2025-12-02T00:06:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:34 crc kubenswrapper[4856]: E1202 00:06:34.719537 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.722330 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.722366 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.722376 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.722392 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.722401 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:34Z","lastTransitionTime":"2025-12-02T00:06:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:34 crc kubenswrapper[4856]: E1202 00:06:34.734283 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.737836 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.737918 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.737935 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.737960 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.737974 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:34Z","lastTransitionTime":"2025-12-02T00:06:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.747548 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: E1202 00:06:34.750919 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.754715 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.754976 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.754987 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.755003 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.755013 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:34Z","lastTransitionTime":"2025-12-02T00:06:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:34 crc kubenswrapper[4856]: E1202 00:06:34.766215 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.769773 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.769825 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.769838 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.769856 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.769868 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:34Z","lastTransitionTime":"2025-12-02T00:06:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:34 crc kubenswrapper[4856]: E1202 00:06:34.788694 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: E1202 00:06:34.788832 4856 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.788796 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed 
to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716
a4b1341f0c8f17e3fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b4
41dd378bd7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.790216 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.790252 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.790264 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.790319 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.790331 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:34Z","lastTransitionTime":"2025-12-02T00:06:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.822917 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.862954 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.893636 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.893670 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.893682 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.893698 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.893709 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:34Z","lastTransitionTime":"2025-12-02T00:06:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.900700 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.941297 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.982604 4856 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.995536 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.995572 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:34 
crc kubenswrapper[4856]: I1202 00:06:34.995582 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.995652 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:34 crc kubenswrapper[4856]: I1202 00:06:34.995664 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:34Z","lastTransitionTime":"2025-12-02T00:06:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.022387 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.061489 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:35 crc 
kubenswrapper[4856]: I1202 00:06:35.097340 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.097371 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.097380 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.097393 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.097402 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:35Z","lastTransitionTime":"2025-12-02T00:06:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.104274 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",
\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.143747 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.180344 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.199010 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.199064 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.199075 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.199092 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.199102 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:35Z","lastTransitionTime":"2025-12-02T00:06:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.301631 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.301664 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.301676 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.301691 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.301702 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:35Z","lastTransitionTime":"2025-12-02T00:06:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.394845 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128"} Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.396496 4856 generic.go:334] "Generic (PLEG): container finished" podID="1fa9541c-8427-40d2-aa5d-b53cb430bddd" containerID="786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482" exitCode=0 Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.396556 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" event={"ID":"1fa9541c-8427-40d2-aa5d-b53cb430bddd","Type":"ContainerDied","Data":"786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482"} Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.398835 4856 generic.go:334] "Generic (PLEG): container finished" podID="3051381c-49c8-4217-9831-013ca2931604" containerID="245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45" exitCode=0 Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.398862 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerDied","Data":"245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45"} Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.402843 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.402872 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.402882 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.402898 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.402909 4856 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:35Z","lastTransitionTime":"2025-12-02T00:06:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.419515 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd
6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.451425 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.464496 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.477013 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.488471 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.506394 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{
\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.506858 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.506923 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.506936 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.506951 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.506961 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:35Z","lastTransitionTime":"2025-12-02T00:06:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.516579 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.533287 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.546225 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.580763 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\"
:\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' 
detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.610679 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.610713 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.610723 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.610736 4856 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.610745 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:35Z","lastTransitionTime":"2025-12-02T00:06:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.619876 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.660262 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.698744 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.712655 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.712693 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.712702 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.712718 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.712730 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:35Z","lastTransitionTime":"2025-12-02T00:06:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.743251 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-p5j4l"] Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.743571 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-p5j4l" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.750126 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.753245 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.772826 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 02 00:06:35 crc 
kubenswrapper[4856]: I1202 00:06:35.792875 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.813757 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.815518 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.815552 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.815561 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.815577 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.815603 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:35Z","lastTransitionTime":"2025-12-02T00:06:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.853289 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.853412 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ee3e0398-8021-446b-b638-d498b5032575-host\") pod \"node-ca-p5j4l\" (UID: \"ee3e0398-8021-446b-b638-d498b5032575\") " pod="openshift-image-registry/node-ca-p5j4l" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.853470 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:06:35 crc kubenswrapper[4856]: E1202 00:06:35.853517 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:06:39.853480796 +0000 UTC m=+26.879848810 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:06:35 crc kubenswrapper[4856]: E1202 00:06:35.853547 4856 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.853607 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/ee3e0398-8021-446b-b638-d498b5032575-serviceca\") pod \"node-ca-p5j4l\" (UID: \"ee3e0398-8021-446b-b638-d498b5032575\") " pod="openshift-image-registry/node-ca-p5j4l" Dec 02 00:06:35 crc kubenswrapper[4856]: E1202 00:06:35.853624 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 00:06:39.85360866 +0000 UTC m=+26.879976734 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.853653 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x56rh\" (UniqueName: \"kubernetes.io/projected/ee3e0398-8021-446b-b638-d498b5032575-kube-api-access-x56rh\") pod \"node-ca-p5j4l\" (UID: \"ee3e0398-8021-446b-b638-d498b5032575\") " pod="openshift-image-registry/node-ca-p5j4l" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.866286 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.901860 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.918049 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.918085 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.918099 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.918116 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.918129 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:35Z","lastTransitionTime":"2025-12-02T00:06:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.939514 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.954348 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/ee3e0398-8021-446b-b638-d498b5032575-serviceca\") pod \"node-ca-p5j4l\" (UID: \"ee3e0398-8021-446b-b638-d498b5032575\") " pod="openshift-image-registry/node-ca-p5j4l" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.954397 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x56rh\" (UniqueName: \"kubernetes.io/projected/ee3e0398-8021-446b-b638-d498b5032575-kube-api-access-x56rh\") pod \"node-ca-p5j4l\" (UID: \"ee3e0398-8021-446b-b638-d498b5032575\") " pod="openshift-image-registry/node-ca-p5j4l" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.954419 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.954437 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ee3e0398-8021-446b-b638-d498b5032575-host\") pod \"node-ca-p5j4l\" (UID: \"ee3e0398-8021-446b-b638-d498b5032575\") " pod="openshift-image-registry/node-ca-p5j4l" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.954463 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.954485 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:06:35 crc kubenswrapper[4856]: E1202 00:06:35.954558 4856 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 00:06:35 crc kubenswrapper[4856]: E1202 00:06:35.954613 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 00:06:39.954583674 +0000 UTC m=+26.980951678 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.954642 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ee3e0398-8021-446b-b638-d498b5032575-host\") pod \"node-ca-p5j4l\" (UID: \"ee3e0398-8021-446b-b638-d498b5032575\") " pod="openshift-image-registry/node-ca-p5j4l" Dec 02 00:06:35 crc kubenswrapper[4856]: E1202 00:06:35.954674 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 00:06:35 crc kubenswrapper[4856]: E1202 00:06:35.954709 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 00:06:35 crc kubenswrapper[4856]: E1202 00:06:35.954720 4856 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:06:35 crc kubenswrapper[4856]: E1202 00:06:35.954746 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 00:06:35 crc kubenswrapper[4856]: E1202 00:06:35.954774 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 00:06:35 crc kubenswrapper[4856]: E1202 00:06:35.954791 4856 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:06:35 crc kubenswrapper[4856]: E1202 00:06:35.954776 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 00:06:39.954759219 +0000 UTC m=+26.981127223 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:06:35 crc kubenswrapper[4856]: E1202 00:06:35.954864 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-12-02 00:06:39.954845001 +0000 UTC m=+26.981213125 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.955644 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/ee3e0398-8021-446b-b638-d498b5032575-serviceca\") pod \"node-ca-p5j4l\" (UID: \"ee3e0398-8021-446b-b638-d498b5032575\") " pod="openshift-image-registry/node-ca-p5j4l" Dec 02 00:06:35 crc kubenswrapper[4856]: I1202 00:06:35.981068 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:35Z is 
after 2025-08-24T17:21:41Z" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.008583 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x56rh\" (UniqueName: \"kubernetes.io/projected/ee3e0398-8021-446b-b638-d498b5032575-kube-api-access-x56rh\") pod \"node-ca-p5j4l\" (UID: \"ee3e0398-8021-446b-b638-d498b5032575\") " pod="openshift-image-registry/node-ca-p5j4l" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.020620 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.020657 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.020669 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.020684 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.020693 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:36Z","lastTransitionTime":"2025-12-02T00:06:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.048569 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\
\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\
\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.058267 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-p5j4l" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.082131 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.122445 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.124149 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.124179 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.124190 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.124209 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.124221 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:36Z","lastTransitionTime":"2025-12-02T00:06:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:36 crc kubenswrapper[4856]: W1202 00:06:36.151695 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podee3e0398_8021_446b_b638_d498b5032575.slice/crio-121c62d6f5852fbe8f017d6dcd4c5aec860c00b0d8254986d26f3b7b5e6d3d74 WatchSource:0}: Error finding container 121c62d6f5852fbe8f017d6dcd4c5aec860c00b0d8254986d26f3b7b5e6d3d74: Status 404 returned error can't find the container with id 121c62d6f5852fbe8f017d6dcd4c5aec860c00b0d8254986d26f3b7b5e6d3d74 Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.167412 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.204807 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\
"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.227643 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.227837 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.227864 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.227887 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.227901 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:36Z","lastTransitionTime":"2025-12-02T00:06:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.244573 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.252210 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.252266 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.252266 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:06:36 crc kubenswrapper[4856]: E1202 00:06:36.252354 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:06:36 crc kubenswrapper[4856]: E1202 00:06:36.252504 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:06:36 crc kubenswrapper[4856]: E1202 00:06:36.252606 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.282832 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.323675 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.330173 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.330199 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.330208 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.330224 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.330236 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:36Z","lastTransitionTime":"2025-12-02T00:06:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.363303 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.405968 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-p5j4l" event={"ID":"ee3e0398-8021-446b-b638-d498b5032575","Type":"ContainerStarted","Data":"a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730"} Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.406011 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-p5j4l" event={"ID":"ee3e0398-8021-446b-b638-d498b5032575","Type":"ContainerStarted","Data":"121c62d6f5852fbe8f017d6dcd4c5aec860c00b0d8254986d26f3b7b5e6d3d74"} Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.411322 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.411545 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerStarted","Data":"38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631"} Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.411624 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerStarted","Data":"acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093"} Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.411640 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerStarted","Data":"2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67"} Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.411654 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerStarted","Data":"ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b"} Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.411668 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerStarted","Data":"31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476"} Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.411681 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerStarted","Data":"5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e"} Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.415463 4856 generic.go:334] "Generic (PLEG): container finished" podID="1fa9541c-8427-40d2-aa5d-b53cb430bddd" containerID="edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a" exitCode=0 Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.416138 4856 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" event={"ID":"1fa9541c-8427-40d2-aa5d-b53cb430bddd","Type":"ContainerDied","Data":"edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a"} Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.434827 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.434897 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.434912 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.434935 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.434975 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:36Z","lastTransitionTime":"2025-12-02T00:06:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.442914 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd9
0d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.483326 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.524203 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.539579 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.539719 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.539736 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.539761 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.539776 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:36Z","lastTransitionTime":"2025-12-02T00:06:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.563583 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\
\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.605812 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.644041 4856 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.644100 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.644111 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.644129 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.644145 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:36Z","lastTransitionTime":"2025-12-02T00:06:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.644628 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.686734 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.738698 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:36Z 
is after 2025-08-24T17:21:41Z" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.748348 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.748387 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.748397 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.748412 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.748424 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:36Z","lastTransitionTime":"2025-12-02T00:06:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.764974 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\
\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.800524 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.845439 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.851255 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.851312 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.851328 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.851352 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.851376 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:36Z","lastTransitionTime":"2025-12-02T00:06:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.881814 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.922484 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.954461 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.954526 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.954548 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.954573 4856 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.954617 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:36Z","lastTransitionTime":"2025-12-02T00:06:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:36 crc kubenswrapper[4856]: I1202 00:06:36.971108 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\
",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.009365 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:37Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.043645 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:37Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.058161 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.058208 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.058223 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.058239 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.058250 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:37Z","lastTransitionTime":"2025-12-02T00:06:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.160744 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.160790 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.160802 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.160818 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.160829 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:37Z","lastTransitionTime":"2025-12-02T00:06:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.264400 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.264469 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.264497 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.264544 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.264817 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:37Z","lastTransitionTime":"2025-12-02T00:06:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.367397 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.367474 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.367499 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.367524 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.367642 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:37Z","lastTransitionTime":"2025-12-02T00:06:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.421963 4856 generic.go:334] "Generic (PLEG): container finished" podID="1fa9541c-8427-40d2-aa5d-b53cb430bddd" containerID="294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f" exitCode=0 Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.422007 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" event={"ID":"1fa9541c-8427-40d2-aa5d-b53cb430bddd","Type":"ContainerDied","Data":"294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f"} Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.439295 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4
brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:37Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.458132 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:37Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.470228 4856 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.470253 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.470262 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.470275 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.470284 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:37Z","lastTransitionTime":"2025-12-02T00:06:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.484220 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\
\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:37Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.501226 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:37Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.517131 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:37Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.539403 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:37Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.555081 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:37Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.570540 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:37Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.572621 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.572647 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.572657 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.572671 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.572681 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:37Z","lastTransitionTime":"2025-12-02T00:06:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.585682 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:37Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.604220 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-releas
e\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:37Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.626734 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:37Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.643029 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:37Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.656964 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:37Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.668399 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:37Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.674721 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.674761 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.674774 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.674792 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.674805 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:37Z","lastTransitionTime":"2025-12-02T00:06:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.684414 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:37Z 
is after 2025-08-24T17:21:41Z" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.777877 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.777924 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.777938 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.777956 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.777973 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:37Z","lastTransitionTime":"2025-12-02T00:06:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.880750 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.880853 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.880876 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.880905 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.880924 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:37Z","lastTransitionTime":"2025-12-02T00:06:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.983111 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.983139 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.983148 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.983163 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:37 crc kubenswrapper[4856]: I1202 00:06:37.983173 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:37Z","lastTransitionTime":"2025-12-02T00:06:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.085038 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.085094 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.085110 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.085133 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.085152 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:38Z","lastTransitionTime":"2025-12-02T00:06:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.187662 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.187703 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.187711 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.187726 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.187734 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:38Z","lastTransitionTime":"2025-12-02T00:06:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.251151 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.251212 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.251174 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:06:38 crc kubenswrapper[4856]: E1202 00:06:38.251268 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:06:38 crc kubenswrapper[4856]: E1202 00:06:38.251321 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:06:38 crc kubenswrapper[4856]: E1202 00:06:38.251385 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.290239 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.290273 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.290281 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.290295 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.290304 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:38Z","lastTransitionTime":"2025-12-02T00:06:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.392941 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.392979 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.392990 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.393006 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.393019 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:38Z","lastTransitionTime":"2025-12-02T00:06:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.427809 4856 generic.go:334] "Generic (PLEG): container finished" podID="1fa9541c-8427-40d2-aa5d-b53cb430bddd" containerID="2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b" exitCode=0 Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.427849 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" event={"ID":"1fa9541c-8427-40d2-aa5d-b53cb430bddd","Type":"ContainerDied","Data":"2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b"} Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.445807 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:38Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.459213 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:38Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.481355 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:38Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.494534 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:38Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.496228 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.496268 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.496279 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.496295 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.496307 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:38Z","lastTransitionTime":"2025-12-02T00:06:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.506874 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:38Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.519813 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:38Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.534035 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:38Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.542684 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:38Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.559985 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:38Z 
is after 2025-08-24T17:21:41Z" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.580347 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:38Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.592657 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:38Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.598178 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.598210 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.598222 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.598238 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.598249 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:38Z","lastTransitionTime":"2025-12-02T00:06:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.603473 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:38Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.612496 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:38Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.622544 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:38Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.631602 4856 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:38Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.700639 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.700924 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.700936 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.700953 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.700963 4856 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:38Z","lastTransitionTime":"2025-12-02T00:06:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.803811 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.803869 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.803881 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.803895 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.803905 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:38Z","lastTransitionTime":"2025-12-02T00:06:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.906505 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.906534 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.906542 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.906557 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:38 crc kubenswrapper[4856]: I1202 00:06:38.906566 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:38Z","lastTransitionTime":"2025-12-02T00:06:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.009420 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.009466 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.009477 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.009497 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.009510 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:39Z","lastTransitionTime":"2025-12-02T00:06:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.112130 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.112166 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.112174 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.112189 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.112198 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:39Z","lastTransitionTime":"2025-12-02T00:06:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.214129 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.214173 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.214184 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.214199 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.214211 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:39Z","lastTransitionTime":"2025-12-02T00:06:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.317039 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.317087 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.317100 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.317118 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.317131 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:39Z","lastTransitionTime":"2025-12-02T00:06:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.420378 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.420418 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.420428 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.420449 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.420460 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:39Z","lastTransitionTime":"2025-12-02T00:06:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.439324 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerStarted","Data":"f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683"} Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.443313 4856 generic.go:334] "Generic (PLEG): container finished" podID="1fa9541c-8427-40d2-aa5d-b53cb430bddd" containerID="ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932" exitCode=0 Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.443357 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" event={"ID":"1fa9541c-8427-40d2-aa5d-b53cb430bddd","Type":"ContainerDied","Data":"ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932"} Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.463123 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:39Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.482902 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:39Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.494721 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:39Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.513047 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:39Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.523160 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.523197 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.523206 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.523220 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.523230 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:39Z","lastTransitionTime":"2025-12-02T00:06:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.526843 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:39Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.541083 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:39Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.553443 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:39Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.565389 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:39Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.577071 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:39Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.601337 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp
-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90
092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:39Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.619770 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:39Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.627325 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.627351 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.627359 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.627372 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.627381 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:39Z","lastTransitionTime":"2025-12-02T00:06:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.633508 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:39Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.649858 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3
356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:39Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.663506 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:39Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.676481 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:39Z is after 
2025-08-24T17:21:41Z"
Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.729778 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.729803 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.729811 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.729824 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.729832 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:39Z","lastTransitionTime":"2025-12-02T00:06:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.832157 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.832203 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.832215 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.832233 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.832245 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:39Z","lastTransitionTime":"2025-12-02T00:06:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.894558 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.894741 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 00:06:39 crc kubenswrapper[4856]: E1202 00:06:39.894824 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed.
No retries permitted until 2025-12-02 00:06:47.894806162 +0000 UTC m=+34.921174166 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:06:39 crc kubenswrapper[4856]: E1202 00:06:39.894859 4856 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 00:06:39 crc kubenswrapper[4856]: E1202 00:06:39.894956 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 00:06:47.894947196 +0000 UTC m=+34.921315200 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.935344 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.935376 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.935385 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.935398 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.935406 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:39Z","lastTransitionTime":"2025-12-02T00:06:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.996201 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.996260 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:06:39 crc kubenswrapper[4856]: I1202 00:06:39.996311 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:06:39 crc kubenswrapper[4856]: E1202 00:06:39.996348 4856 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 00:06:39 crc kubenswrapper[4856]: E1202 00:06:39.996401 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 00:06:47.996385365 +0000 UTC m=+35.022753369 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 00:06:39 crc kubenswrapper[4856]: E1202 00:06:39.996444 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 00:06:39 crc kubenswrapper[4856]: E1202 00:06:39.996466 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 00:06:39 crc kubenswrapper[4856]: E1202 00:06:39.996483 4856 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:06:39 crc kubenswrapper[4856]: E1202 00:06:39.996498 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 00:06:39 crc kubenswrapper[4856]: E1202 00:06:39.996528 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 00:06:39 crc kubenswrapper[4856]: E1202 00:06:39.996539 4856 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:06:39 crc kubenswrapper[4856]: E1202 00:06:39.996542 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 00:06:47.996525039 +0000 UTC m=+35.022893073 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:06:39 crc kubenswrapper[4856]: E1202 00:06:39.996609 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 00:06:47.99657324 +0000 UTC m=+35.022941244 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.038069 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.038113 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.038130 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.038154 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.038170 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:40Z","lastTransitionTime":"2025-12-02T00:06:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.141159 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.141283 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.141312 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.141339 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.141362 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:40Z","lastTransitionTime":"2025-12-02T00:06:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.243801 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.243843 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.243854 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.243869 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.243877 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:40Z","lastTransitionTime":"2025-12-02T00:06:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.252080 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.252129 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.252159 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:06:40 crc kubenswrapper[4856]: E1202 00:06:40.252200 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:06:40 crc kubenswrapper[4856]: E1202 00:06:40.252268 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:06:40 crc kubenswrapper[4856]: E1202 00:06:40.252418 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.345828 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.345872 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.345886 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.345903 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.345916 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:40Z","lastTransitionTime":"2025-12-02T00:06:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.447766 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.447823 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.447841 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.447868 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.447884 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:40Z","lastTransitionTime":"2025-12-02T00:06:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.451886 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" event={"ID":"1fa9541c-8427-40d2-aa5d-b53cb430bddd","Type":"ContainerStarted","Data":"49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc"} Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.549426 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.549497 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.549517 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.549542 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.549560 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:40Z","lastTransitionTime":"2025-12-02T00:06:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.652073 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.652122 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.652141 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.652173 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.652197 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:40Z","lastTransitionTime":"2025-12-02T00:06:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.754769 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.754817 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.754875 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.754938 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.754952 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:40Z","lastTransitionTime":"2025-12-02T00:06:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.857500 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.857535 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.857546 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.857563 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.857573 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:40Z","lastTransitionTime":"2025-12-02T00:06:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.959504 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.959544 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.959556 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.959572 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:40 crc kubenswrapper[4856]: I1202 00:06:40.959582 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:40Z","lastTransitionTime":"2025-12-02T00:06:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.061876 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.062401 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.062673 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.063121 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.063372 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:41Z","lastTransitionTime":"2025-12-02T00:06:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.165529 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.165865 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.166004 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.166312 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.166571 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:41Z","lastTransitionTime":"2025-12-02T00:06:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.268616 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.268907 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.269077 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.269258 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.269419 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:41Z","lastTransitionTime":"2025-12-02T00:06:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.371880 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.372088 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.372185 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.372275 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.372354 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:41Z","lastTransitionTime":"2025-12-02T00:06:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.459053 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerStarted","Data":"186d624aa5141befea211ab69717e26fe784b98d6deccc7d6136ffc6b3159b19"} Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.460684 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.472634 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.475164 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.475217 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.475231 4856 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.475251 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.475263 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:41Z","lastTransitionTime":"2025-12-02T00:06:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.492666 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.507278 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.542913 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.554878 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.566187 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.577276 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.577303 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.577312 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.577327 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.577337 4856 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:41Z","lastTransitionTime":"2025-12-02T00:06:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.578935 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.593196 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.605329 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.623132 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.631851 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.649404 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3
fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.661048 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.671524 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.679340 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.679380 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.679389 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.679404 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.679415 4856 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:41Z","lastTransitionTime":"2025-12-02T00:06:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.682628 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.691897 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.707109 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{
\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.718890 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.733258 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.745282 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.764885 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.779354 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.782337 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.782402 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.782424 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.782454 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.782476 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:41Z","lastTransitionTime":"2025-12-02T00:06:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.795842 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.811960 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.836770 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"
name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socke
t\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186d624aa5141befea211ab69717e26fe784b98d6deccc7d6136ffc6b3159b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mou
ntPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.859982 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3
fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.874196 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.885120 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.885149 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.885162 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.885180 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.885192 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:41Z","lastTransitionTime":"2025-12-02T00:06:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.888220 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.911755 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.931102 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.948340 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:41Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.987454 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.987492 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.987502 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.987518 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:41 crc kubenswrapper[4856]: I1202 00:06:41.987530 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:41Z","lastTransitionTime":"2025-12-02T00:06:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.090776 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.090846 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.090858 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.090876 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.090888 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:42Z","lastTransitionTime":"2025-12-02T00:06:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.194057 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.194118 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.194135 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.194163 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.194180 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:42Z","lastTransitionTime":"2025-12-02T00:06:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.251898 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.251945 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:06:42 crc kubenswrapper[4856]: E1202 00:06:42.252085 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.252577 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:06:42 crc kubenswrapper[4856]: E1202 00:06:42.252693 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:06:42 crc kubenswrapper[4856]: E1202 00:06:42.252771 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.297222 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.297261 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.297272 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.297294 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.297306 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:42Z","lastTransitionTime":"2025-12-02T00:06:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.400487 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.400550 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.400564 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.400607 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.400623 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:42Z","lastTransitionTime":"2025-12-02T00:06:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.463416 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.463486 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.484563 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.498294 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:42Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.503609 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.503661 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.503678 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.503703 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.503718 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:42Z","lastTransitionTime":"2025-12-02T00:06:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.511898 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:42Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.531758 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3
fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:42Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.550706 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:42Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.562884 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:42Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.577489 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:42Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.591442 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:42Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.606006 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.606052 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.606063 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.606082 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.606094 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:42Z","lastTransitionTime":"2025-12-02T00:06:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.607165 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\
\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:42Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.621065 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:42Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.639493 4856 
status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c
857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-
release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:42Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.657156 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:42Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.671171 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:42Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.684384 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:42Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.696191 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:42Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.709150 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.709195 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.709208 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.709232 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.709244 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:42Z","lastTransitionTime":"2025-12-02T00:06:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.716232 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186d624aa5141befea211ab69717e26fe784b98d
6deccc7d6136ffc6b3159b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:42Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.810892 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.810930 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.810942 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.810958 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.810969 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:42Z","lastTransitionTime":"2025-12-02T00:06:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.912809 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.912836 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.912845 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.912857 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:42 crc kubenswrapper[4856]: I1202 00:06:42.912865 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:42Z","lastTransitionTime":"2025-12-02T00:06:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.015171 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.015207 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.015218 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.015232 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.015243 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:43Z","lastTransitionTime":"2025-12-02T00:06:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.117777 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.117816 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.117825 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.117839 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.117849 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:43Z","lastTransitionTime":"2025-12-02T00:06:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.238463 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.238500 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.238512 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.238532 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.238544 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:43Z","lastTransitionTime":"2025-12-02T00:06:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.281360 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186d624aa5141befea211ab69717e26fe784b98d
6deccc7d6136ffc6b3159b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.298881 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.311483 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.326145 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.340910 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.341083 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.341109 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.341117 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.341131 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.341142 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:43Z","lastTransitionTime":"2025-12-02T00:06:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.354807 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.375754 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3
fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.389854 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.403741 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.419435 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.435781 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.443760 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.443804 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.443821 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.443840 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.443853 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:43Z","lastTransitionTime":"2025-12-02T00:06:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.449739 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\
\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.466418 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.484140 4856 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.501388 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.547273 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.547321 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.547335 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.547357 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.547373 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:43Z","lastTransitionTime":"2025-12-02T00:06:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.649522 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.649571 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.649583 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.649615 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.649628 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:43Z","lastTransitionTime":"2025-12-02T00:06:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.752033 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.752085 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.752101 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.752120 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.752132 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:43Z","lastTransitionTime":"2025-12-02T00:06:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.855033 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.855066 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.855074 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.855093 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.855104 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:43Z","lastTransitionTime":"2025-12-02T00:06:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.957127 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.957203 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.957229 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.957262 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:43 crc kubenswrapper[4856]: I1202 00:06:43.957285 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:43Z","lastTransitionTime":"2025-12-02T00:06:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.059408 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.059465 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.059481 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.059501 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.059514 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:44Z","lastTransitionTime":"2025-12-02T00:06:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.162338 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.162397 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.162414 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.162440 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.162457 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:44Z","lastTransitionTime":"2025-12-02T00:06:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.251925 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.251995 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.251989 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:06:44 crc kubenswrapper[4856]: E1202 00:06:44.252081 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:06:44 crc kubenswrapper[4856]: E1202 00:06:44.252208 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:06:44 crc kubenswrapper[4856]: E1202 00:06:44.252302 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.263908 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.263939 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.263947 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.263961 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.263974 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:44Z","lastTransitionTime":"2025-12-02T00:06:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.366745 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.366805 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.366817 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.366834 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.366846 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:44Z","lastTransitionTime":"2025-12-02T00:06:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.468566 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.468644 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.468663 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.468685 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.468702 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:44Z","lastTransitionTime":"2025-12-02T00:06:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.470568 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l5jg6_3051381c-49c8-4217-9831-013ca2931604/ovnkube-controller/0.log" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.473096 4856 generic.go:334] "Generic (PLEG): container finished" podID="3051381c-49c8-4217-9831-013ca2931604" containerID="186d624aa5141befea211ab69717e26fe784b98d6deccc7d6136ffc6b3159b19" exitCode=1 Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.473175 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerDied","Data":"186d624aa5141befea211ab69717e26fe784b98d6deccc7d6136ffc6b3159b19"} Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.474195 4856 scope.go:117] "RemoveContainer" containerID="186d624aa5141befea211ab69717e26fe784b98d6deccc7d6136ffc6b3159b19" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.493255 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:44Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.509391 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:44Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.526637 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:44Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.538049 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:44Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.560045 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ov
nkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186d624aa5141befea211ab69717e26fe784b98d6deccc7d6136ffc6b3159b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://186d624aa5141befea211ab69717e26fe784b98d6deccc7d6136ffc6b3159b19\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:06:43Z\\\",\\\"message\\\":\\\"irewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 00:06:43.376826 6147 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 00:06:43.376842 6147 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 00:06:43.376913 6147 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 00:06:43.376986 6147 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 00:06:43.377043 6147 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1202 00:06:43.377206 6147 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 00:06:43.377285 6147 reflector.go:311] Stopping reflector *v1.EgressQoS 
(0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 00:06:43.378286 6147 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"contai
nerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:44Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.571687 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.571723 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.571735 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.571754 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.571766 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:44Z","lastTransitionTime":"2025-12-02T00:06:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.572895 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:44Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.584780 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:44Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.602039 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3
fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:44Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.616521 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:44Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.633197 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:44Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.646040 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:44Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.666496 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:44Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.673577 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.673642 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.673656 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.673671 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.673681 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:44Z","lastTransitionTime":"2025-12-02T00:06:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.678799 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\
\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:44Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.690411 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:44Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.704000 4856 
status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c
857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-
release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:44Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.775724 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.775760 4856 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.775771 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.775788 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.775799 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:44Z","lastTransitionTime":"2025-12-02T00:06:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.877707 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.878006 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.878017 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.878034 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.878046 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:44Z","lastTransitionTime":"2025-12-02T00:06:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.979895 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.979927 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.979937 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.979950 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:44 crc kubenswrapper[4856]: I1202 00:06:44.979960 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:44Z","lastTransitionTime":"2025-12-02T00:06:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.081989 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.082026 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.082037 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.082054 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.082067 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:45Z","lastTransitionTime":"2025-12-02T00:06:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.104113 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.104151 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.104165 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.104182 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.104194 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:45Z","lastTransitionTime":"2025-12-02T00:06:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:45 crc kubenswrapper[4856]: E1202 00:06:45.118499 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.121429 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.121452 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.121461 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.121476 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.121487 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:45Z","lastTransitionTime":"2025-12-02T00:06:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:45 crc kubenswrapper[4856]: E1202 00:06:45.132476 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.135618 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.135649 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.135660 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.135677 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.135687 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:45Z","lastTransitionTime":"2025-12-02T00:06:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:45 crc kubenswrapper[4856]: E1202 00:06:45.147468 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.149792 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.149830 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.149840 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.149856 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.149866 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:45Z","lastTransitionTime":"2025-12-02T00:06:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:45 crc kubenswrapper[4856]: E1202 00:06:45.163168 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.166710 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.166738 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.166749 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.166764 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.166774 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:45Z","lastTransitionTime":"2025-12-02T00:06:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:45 crc kubenswrapper[4856]: E1202 00:06:45.177058 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: E1202 00:06:45.177179 4856 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.183406 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.183434 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.183445 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.183459 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.183469 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:45Z","lastTransitionTime":"2025-12-02T00:06:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.234297 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.248307 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.272189 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics
-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\
":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186d624aa5141befea211ab69717e26fe784b98d6deccc7d6136ffc6b3159b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://186d624aa5141befea211ab69717e26fe784b98d6deccc7d6136ffc6b3159b19\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:06:43Z\\\",\\\"message\\\":\\\"irewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 00:06:43.376826 6147 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 00:06:43.376842 6147 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 00:06:43.376913 6147 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 00:06:43.376986 6147 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 00:06:43.377043 6147 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1202 00:06:43.377206 6147 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 00:06:43.377285 6147 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 00:06:43.378286 6147 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":
\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.286008 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.286048 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.286059 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.286080 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.286090 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:45Z","lastTransitionTime":"2025-12-02T00:06:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.287559 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.304459 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.320896 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.332322 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.341936 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.358300 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp
-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90
092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.373850 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.384842 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.388404 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.388467 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.388483 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.388509 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.388522 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:45Z","lastTransitionTime":"2025-12-02T00:06:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.404928 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.423369 4856 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-5mfwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.442819 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current 
time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.455360 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mount
Path\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.466827 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.478410 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l5jg6_3051381c-49c8-4217-9831-013ca2931604/ovnkube-controller/0.log" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.480840 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerStarted","Data":"ce77f61de2ca77d9ff611b7d63090b9ce543a249b1ee7489d79b80378b67ad02"} Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.481407 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.490497 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.490521 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.490530 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.490542 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.490552 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:45Z","lastTransitionTime":"2025-12-02T00:06:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.500056 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.509865 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.538692 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce77f61de2ca77d9ff611b7d63090b9ce543a249b1ee7489d79b80378b67ad02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://186d624aa5141befea211ab69717e26fe784b98d6deccc7d6136ffc6b3159b19\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:06:43Z\\\",\\\"message\\\":\\\"irewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 00:06:43.376826 6147 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 00:06:43.376842 6147 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 00:06:43.376913 6147 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 00:06:43.376986 6147 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 00:06:43.377043 6147 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1202 00:06:43.377206 6147 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 00:06:43.377285 6147 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 00:06:43.378286 6147 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.561468 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3
fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.582711 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.593104 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.593284 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.593385 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.593479 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.593570 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:45Z","lastTransitionTime":"2025-12-02T00:06:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.597725 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.616539 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.633578 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.650141 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.668741 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.688968 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.696144 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.696231 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.696251 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.696275 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.696294 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:45Z","lastTransitionTime":"2025-12-02T00:06:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.710511 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.731915 4856 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-5mfwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.754340 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":tr
ue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.766239 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9"] Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.766972 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.769432 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.769643 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.777252 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.799727 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.799775 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.799796 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.799823 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.799841 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:45Z","lastTransitionTime":"2025-12-02T00:06:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.805015 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.824741 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.841906 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.856025 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1b19beb5-329d-48ef-bce0-8e299b9a21c5-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-jzwq9\" (UID: \"1b19beb5-329d-48ef-bce0-8e299b9a21c5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.856092 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1b19beb5-329d-48ef-bce0-8e299b9a21c5-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-jzwq9\" (UID: \"1b19beb5-329d-48ef-bce0-8e299b9a21c5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.856151 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-6467r\" (UniqueName: \"kubernetes.io/projected/1b19beb5-329d-48ef-bce0-8e299b9a21c5-kube-api-access-6467r\") pod \"ovnkube-control-plane-749d76644c-jzwq9\" (UID: \"1b19beb5-329d-48ef-bce0-8e299b9a21c5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.856203 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1b19beb5-329d-48ef-bce0-8e299b9a21c5-env-overrides\") pod \"ovnkube-control-plane-749d76644c-jzwq9\" (UID: \"1b19beb5-329d-48ef-bce0-8e299b9a21c5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.870897 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce77f61de2ca77d9ff611b7d63090b9ce543a249
b1ee7489d79b80378b67ad02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://186d624aa5141befea211ab69717e26fe784b98d6deccc7d6136ffc6b3159b19\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:06:43Z\\\",\\\"message\\\":\\\"irewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 00:06:43.376826 6147 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 00:06:43.376842 6147 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 00:06:43.376913 6147 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 00:06:43.376986 6147 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 00:06:43.377043 6147 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1202 00:06:43.377206 6147 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 00:06:43.377285 6147 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 00:06:43.378286 6147 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.891860 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.901755 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.901807 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.901826 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.901847 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.901862 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:45Z","lastTransitionTime":"2025-12-02T00:06:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.916143 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.933228 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.948511 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.958234 4856 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-6467r\" (UniqueName: \"kubernetes.io/projected/1b19beb5-329d-48ef-bce0-8e299b9a21c5-kube-api-access-6467r\") pod \"ovnkube-control-plane-749d76644c-jzwq9\" (UID: \"1b19beb5-329d-48ef-bce0-8e299b9a21c5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.958329 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1b19beb5-329d-48ef-bce0-8e299b9a21c5-env-overrides\") pod \"ovnkube-control-plane-749d76644c-jzwq9\" (UID: \"1b19beb5-329d-48ef-bce0-8e299b9a21c5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.958452 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1b19beb5-329d-48ef-bce0-8e299b9a21c5-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-jzwq9\" (UID: \"1b19beb5-329d-48ef-bce0-8e299b9a21c5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.958507 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1b19beb5-329d-48ef-bce0-8e299b9a21c5-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-jzwq9\" (UID: \"1b19beb5-329d-48ef-bce0-8e299b9a21c5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.959307 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1b19beb5-329d-48ef-bce0-8e299b9a21c5-env-overrides\") pod \"ovnkube-control-plane-749d76644c-jzwq9\" (UID: \"1b19beb5-329d-48ef-bce0-8e299b9a21c5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.959893 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1b19beb5-329d-48ef-bce0-8e299b9a21c5-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-jzwq9\" (UID: \"1b19beb5-329d-48ef-bce0-8e299b9a21c5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.963770 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.965150 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1b19beb5-329d-48ef-bce0-8e299b9a21c5-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-jzwq9\" (UID: \"1b19beb5-329d-48ef-bce0-8e299b9a21c5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.973904 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b19beb5-329d-48ef-bce0-8e299b9a21c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jzwq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.982695 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6467r\" (UniqueName: 
\"kubernetes.io/projected/1b19beb5-329d-48ef-bce0-8e299b9a21c5-kube-api-access-6467r\") pod \"ovnkube-control-plane-749d76644c-jzwq9\" (UID: \"1b19beb5-329d-48ef-bce0-8e299b9a21c5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" Dec 02 00:06:45 crc kubenswrapper[4856]: I1202 00:06:45.991570 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\"
:true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:45Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.002998 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:46Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.004673 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.004703 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.004715 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.004733 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.004745 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:46Z","lastTransitionTime":"2025-12-02T00:06:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.012713 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:46Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.044581 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:46Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.062011 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:46Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.079012 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:46Z is after 
2025-08-24T17:21:41Z" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.091157 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.106637 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.106667 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.106679 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.106695 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.106707 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:46Z","lastTransitionTime":"2025-12-02T00:06:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.208910 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.208947 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.208958 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.208980 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.208993 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:46Z","lastTransitionTime":"2025-12-02T00:06:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.251454 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.251535 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.251454 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:06:46 crc kubenswrapper[4856]: E1202 00:06:46.251683 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:06:46 crc kubenswrapper[4856]: E1202 00:06:46.251583 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:06:46 crc kubenswrapper[4856]: E1202 00:06:46.251789 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.312036 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.312081 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.312091 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.312107 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.312118 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:46Z","lastTransitionTime":"2025-12-02T00:06:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.415386 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.415438 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.415451 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.415468 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.415480 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:46Z","lastTransitionTime":"2025-12-02T00:06:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.486849 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l5jg6_3051381c-49c8-4217-9831-013ca2931604/ovnkube-controller/1.log" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.487860 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l5jg6_3051381c-49c8-4217-9831-013ca2931604/ovnkube-controller/0.log" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.491973 4856 generic.go:334] "Generic (PLEG): container finished" podID="3051381c-49c8-4217-9831-013ca2931604" containerID="ce77f61de2ca77d9ff611b7d63090b9ce543a249b1ee7489d79b80378b67ad02" exitCode=1 Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.492108 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerDied","Data":"ce77f61de2ca77d9ff611b7d63090b9ce543a249b1ee7489d79b80378b67ad02"} Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.492231 4856 scope.go:117] "RemoveContainer" containerID="186d624aa5141befea211ab69717e26fe784b98d6deccc7d6136ffc6b3159b19" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.492980 4856 scope.go:117] "RemoveContainer" containerID="ce77f61de2ca77d9ff611b7d63090b9ce543a249b1ee7489d79b80378b67ad02" Dec 02 00:06:46 crc kubenswrapper[4856]: E1202 00:06:46.493208 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-l5jg6_openshift-ovn-kubernetes(3051381c-49c8-4217-9831-013ca2931604)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" podUID="3051381c-49c8-4217-9831-013ca2931604" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.495866 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" event={"ID":"1b19beb5-329d-48ef-bce0-8e299b9a21c5","Type":"ContainerStarted","Data":"e6fb286b99cd9f72f657a04cd50b65aa632b58c0a5db817daf65aa3ca7aa2457"} Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.514152 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:46Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.518129 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.518171 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.518185 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.518202 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.518216 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:46Z","lastTransitionTime":"2025-12-02T00:06:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.528397 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:46Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.559849 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce77f61de2ca77d9ff611b7d63090b9ce543a249b1ee7489d79b80378b67ad02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://186d624aa5141befea211ab69717e26fe784b98d6deccc7d6136ffc6b3159b19\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:06:43Z\\\",\\\"message\\\":\\\"irewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 00:06:43.376826 6147 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 00:06:43.376842 6147 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 00:06:43.376913 6147 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 00:06:43.376986 6147 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 00:06:43.377043 6147 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1202 00:06:43.377206 6147 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 00:06:43.377285 6147 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 00:06:43.378286 6147 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce77f61de2ca77d9ff611b7d63090b9ce543a249b1ee7489d79b80378b67ad02\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:06:46Z\\\",\\\"message\\\":\\\"false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.110:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == 
{f9232b32-e89f-4c8e-acc4-c6801b70dcb0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:NB_Global Row:map[] Rows:[] Columns:[] Mutations:[{Column:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 00:06:45.224698 6293 admin_network_policy_controller.go:133] Setting up event handlers for Admin Network Policy\\\\nI1202 00:06:45.225306 6293 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 00:06:45.225457 6293 ovnkube.go:599] Stopped ovnkube\\\\nI1202 00:06:45.225541 6293 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1202 00:06:45.225650 6293 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f520635cbcb56d7accd262f60f9453291cce9e95946
10f591ceb6574edd1d683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:46Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.580717 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3
fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:46Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.602499 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:46Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.614636 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:46Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.619881 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.619912 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.619923 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.619939 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.619950 4856 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:46Z","lastTransitionTime":"2025-12-02T00:06:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.626533 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:46Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.637569 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:46Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.648570 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:46Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.658759 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b19beb5-329d-48ef-bce0-8e299b9a21c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:45Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jzwq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:46Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.671422 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:46Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.683397 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:46Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.697955 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:46Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.710224 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:46Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.722362 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.722403 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.722415 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.722431 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.722441 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:46Z","lastTransitionTime":"2025-12-02T00:06:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.724508 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/cr
cont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:46Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.741080 4856 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:46Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.824999 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.825032 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.825043 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.825057 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.825068 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:46Z","lastTransitionTime":"2025-12-02T00:06:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.927256 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.927300 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.927319 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.927337 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:46 crc kubenswrapper[4856]: I1202 00:06:46.927349 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:46Z","lastTransitionTime":"2025-12-02T00:06:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.030404 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.030523 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.030542 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.030567 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.030608 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:47Z","lastTransitionTime":"2025-12-02T00:06:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.134386 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.134452 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.134477 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.134507 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.134529 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:47Z","lastTransitionTime":"2025-12-02T00:06:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.237057 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.237149 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.237169 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.237200 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.237225 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:47Z","lastTransitionTime":"2025-12-02T00:06:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.274358 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-4zvgr"] Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.274820 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:06:47 crc kubenswrapper[4856]: E1202 00:06:47.274883 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.298716 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"
name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.313056 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.328706 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.338747 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.339541 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.339793 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.339951 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.340275 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.340473 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:47Z","lastTransitionTime":"2025-12-02T00:06:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.359355 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce77f61de2ca77d9ff611b7d63090b9ce543a249
b1ee7489d79b80378b67ad02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://186d624aa5141befea211ab69717e26fe784b98d6deccc7d6136ffc6b3159b19\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:06:43Z\\\",\\\"message\\\":\\\"irewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 00:06:43.376826 6147 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 00:06:43.376842 6147 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 00:06:43.376913 6147 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 00:06:43.376986 6147 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 00:06:43.377043 6147 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1202 00:06:43.377206 6147 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 00:06:43.377285 6147 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 00:06:43.378286 6147 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce77f61de2ca77d9ff611b7d63090b9ce543a249b1ee7489d79b80378b67ad02\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:06:46Z\\\",\\\"message\\\":\\\"false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.110:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f9232b32-e89f-4c8e-acc4-c6801b70dcb0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:NB_Global Row:map[] Rows:[] Columns:[] Mutations:[{Column:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 00:06:45.224698 6293 admin_network_policy_controller.go:133] Setting up event handlers for Admin Network Policy\\\\nI1202 00:06:45.225306 6293 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: 
UUIDName:}]\\\\nI1202 00:06:45.225457 6293 ovnkube.go:599] Stopped ovnkube\\\\nI1202 00:06:45.225541 6293 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1202 00:06:45.225650 6293 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recur
siveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.371872 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs\") pod \"network-metrics-daemon-4zvgr\" (UID: \"cbedea3d-dea3-407d-aae3-2ac725bcab34\") " pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.371930 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xf44\" (UniqueName: \"kubernetes.io/projected/cbedea3d-dea3-407d-aae3-2ac725bcab34-kube-api-access-7xf44\") pod \"network-metrics-daemon-4zvgr\" (UID: \"cbedea3d-dea3-407d-aae3-2ac725bcab34\") " pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.385530 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3
fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.397131 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.407443 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.421978 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.433094 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.442831 4856 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.443123 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.443145 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.443153 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.443310 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.443324 4856 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:47Z","lastTransitionTime":"2025-12-02T00:06:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.455836 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b19beb5-329d-48ef-bce0-8e299b9a21c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:45Z
\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jzwq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.465970 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4zvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cbedea3d-dea3-407d-aae3-2ac725bcab34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4zvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.473092 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs\") pod \"network-metrics-daemon-4zvgr\" (UID: \"cbedea3d-dea3-407d-aae3-2ac725bcab34\") " pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.473152 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xf44\" (UniqueName: \"kubernetes.io/projected/cbedea3d-dea3-407d-aae3-2ac725bcab34-kube-api-access-7xf44\") pod \"network-metrics-daemon-4zvgr\" (UID: \"cbedea3d-dea3-407d-aae3-2ac725bcab34\") " pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:06:47 crc kubenswrapper[4856]: E1202 00:06:47.473378 4856 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 00:06:47 crc kubenswrapper[4856]: E1202 00:06:47.473417 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs podName:cbedea3d-dea3-407d-aae3-2ac725bcab34 nodeName:}" failed. No retries permitted until 2025-12-02 00:06:47.973404695 +0000 UTC m=+34.999772699 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs") pod "network-metrics-daemon-4zvgr" (UID: "cbedea3d-dea3-407d-aae3-2ac725bcab34") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.480433 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.489245 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xf44\" (UniqueName: \"kubernetes.io/projected/cbedea3d-dea3-407d-aae3-2ac725bcab34-kube-api-access-7xf44\") pod \"network-metrics-daemon-4zvgr\" (UID: \"cbedea3d-dea3-407d-aae3-2ac725bcab34\") " pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.492689 4856 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.501164 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" event={"ID":"1b19beb5-329d-48ef-bce0-8e299b9a21c5","Type":"ContainerStarted","Data":"3e7c9080ee52716f66a9635ece39df2447e9c43d749e8e284a43fb940bbdd8b6"} Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.501209 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" event={"ID":"1b19beb5-329d-48ef-bce0-8e299b9a21c5","Type":"ContainerStarted","Data":"693c2f513a2b7d12bfa2c567c40bdc72fc5059614c9edf1b3b9784cf368612db"} Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.502858 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l5jg6_3051381c-49c8-4217-9831-013ca2931604/ovnkube-controller/1.log" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.505564 4856 scope.go:117] "RemoveContainer" containerID="ce77f61de2ca77d9ff611b7d63090b9ce543a249b1ee7489d79b80378b67ad02" Dec 02 00:06:47 crc kubenswrapper[4856]: E1202 00:06:47.505720 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting 
failed container=ovnkube-controller pod=ovnkube-node-l5jg6_openshift-ovn-kubernetes(3051381c-49c8-4217-9831-013ca2931604)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" podUID="3051381c-49c8-4217-9831-013ca2931604" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.508136 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api
-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\
\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.521271 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.531298 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.545965 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.545997 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.546008 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.546026 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.546037 4856 setters.go:603] 
"Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:47Z","lastTransitionTime":"2025-12-02T00:06:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.547428 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\
\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"st
ate\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.561276 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.576801 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,
\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.591224 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f
7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.605962 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.616648 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.632739 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce77f61de2ca77d9ff611b7d63090b9ce543a249b1ee7489d79b80378b67ad02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce77f61de2ca77d9ff611b7d63090b9ce543a249b1ee7489d79b80378b67ad02\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:06:46Z\\\",\\\"message\\\":\\\"false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.110:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f9232b32-e89f-4c8e-acc4-c6801b70dcb0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:NB_Global Row:map[] Rows:[] Columns:[] Mutations:[{Column:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 00:06:45.224698 6293 admin_network_policy_controller.go:133] Setting up event handlers for Admin Network Policy\\\\nI1202 00:06:45.225306 6293 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 00:06:45.225457 6293 ovnkube.go:599] Stopped ovnkube\\\\nI1202 00:06:45.225541 6293 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1202 00:06:45.225650 6293 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l5jg6_openshift-ovn-kubernetes(3051381c-49c8-4217-9831-013ca2931604)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.644976 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.648601 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.648646 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.648657 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.648675 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.648688 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:47Z","lastTransitionTime":"2025-12-02T00:06:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.656713 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.673266 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.685283 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.694290 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.705371 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b19beb5-329d-48ef-bce0-8e299b9a21c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://693c2f513a2b7d12bfa2c567c40bdc72fc5059614c9edf1b3b9784cf368612db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e7c9080ee52716f66a9635ece39df2447e9c43d749e8e284a43fb940bbdd8b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126
.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jzwq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.720819 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4zvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cbedea3d-dea3-407d-aae3-2ac725bcab34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4zvgr\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.738562 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8e
e7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"et
cd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.751354 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.751399 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.751411 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.751388 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:47Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.751430 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.751456 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:47Z","lastTransitionTime":"2025-12-02T00:06:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.854615 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.854664 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.854675 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.854693 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.854706 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:47Z","lastTransitionTime":"2025-12-02T00:06:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.956583 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.956631 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.956639 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.956652 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.956661 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:47Z","lastTransitionTime":"2025-12-02T00:06:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.978461 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:06:47 crc kubenswrapper[4856]: E1202 00:06:47.978546 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:07:03.978529676 +0000 UTC m=+51.004897680 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.978625 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs\") pod \"network-metrics-daemon-4zvgr\" (UID: \"cbedea3d-dea3-407d-aae3-2ac725bcab34\") " pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:06:47 crc kubenswrapper[4856]: I1202 00:06:47.978723 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:06:47 crc kubenswrapper[4856]: E1202 00:06:47.978863 4856 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 00:06:47 crc kubenswrapper[4856]: E1202 00:06:47.978933 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 00:07:03.978913006 +0000 UTC m=+51.005281080 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 00:06:47 crc kubenswrapper[4856]: E1202 00:06:47.979406 4856 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 00:06:47 crc kubenswrapper[4856]: E1202 00:06:47.979465 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs podName:cbedea3d-dea3-407d-aae3-2ac725bcab34 nodeName:}" failed. No retries permitted until 2025-12-02 00:06:48.97944924 +0000 UTC m=+36.005817344 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs") pod "network-metrics-daemon-4zvgr" (UID: "cbedea3d-dea3-407d-aae3-2ac725bcab34") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.058560 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.058653 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.058674 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.058699 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.058717 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:48Z","lastTransitionTime":"2025-12-02T00:06:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.079736 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.079782 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.079807 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:06:48 crc kubenswrapper[4856]: E1202 00:06:48.079885 4856 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 00:06:48 crc kubenswrapper[4856]: E1202 00:06:48.079917 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 00:07:04.079907893 +0000 UTC m=+51.106275897 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 00:06:48 crc kubenswrapper[4856]: E1202 00:06:48.079936 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 00:06:48 crc kubenswrapper[4856]: E1202 00:06:48.079971 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 00:06:48 crc kubenswrapper[4856]: E1202 00:06:48.080022 4856 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:06:48 crc kubenswrapper[4856]: E1202 00:06:48.080080 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 00:07:04.080061277 +0000 UTC m=+51.106429321 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:06:48 crc kubenswrapper[4856]: E1202 00:06:48.080079 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 00:06:48 crc kubenswrapper[4856]: E1202 00:06:48.080129 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 00:06:48 crc kubenswrapper[4856]: E1202 00:06:48.080152 4856 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:06:48 crc kubenswrapper[4856]: E1202 00:06:48.080255 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 00:07:04.080223701 +0000 UTC m=+51.106591745 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.161313 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.161398 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.161416 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.161434 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.161446 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:48Z","lastTransitionTime":"2025-12-02T00:06:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.251570 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.251653 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:06:48 crc kubenswrapper[4856]: E1202 00:06:48.251768 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.251786 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:06:48 crc kubenswrapper[4856]: E1202 00:06:48.251916 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:06:48 crc kubenswrapper[4856]: E1202 00:06:48.252110 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.263534 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.263586 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.263643 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.263667 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.263688 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:48Z","lastTransitionTime":"2025-12-02T00:06:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.366307 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.366370 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.366393 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.366416 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.366431 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:48Z","lastTransitionTime":"2025-12-02T00:06:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.469338 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.469390 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.469401 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.469421 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.469437 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:48Z","lastTransitionTime":"2025-12-02T00:06:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.571302 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.571337 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.571348 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.571364 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.571376 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:48Z","lastTransitionTime":"2025-12-02T00:06:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.673815 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.673881 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.673900 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.673925 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.673945 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:48Z","lastTransitionTime":"2025-12-02T00:06:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.776039 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.776077 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.776086 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.776122 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.776132 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:48Z","lastTransitionTime":"2025-12-02T00:06:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.879407 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.879489 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.879512 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.879542 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.879566 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:48Z","lastTransitionTime":"2025-12-02T00:06:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.982198 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.982259 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.982276 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.982300 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.982317 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:48Z","lastTransitionTime":"2025-12-02T00:06:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:48 crc kubenswrapper[4856]: I1202 00:06:48.988833 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs\") pod \"network-metrics-daemon-4zvgr\" (UID: \"cbedea3d-dea3-407d-aae3-2ac725bcab34\") " pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:06:48 crc kubenswrapper[4856]: E1202 00:06:48.989070 4856 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 00:06:48 crc kubenswrapper[4856]: E1202 00:06:48.989184 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs podName:cbedea3d-dea3-407d-aae3-2ac725bcab34 nodeName:}" failed. No retries permitted until 2025-12-02 00:06:50.989155577 +0000 UTC m=+38.015523651 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs") pod "network-metrics-daemon-4zvgr" (UID: "cbedea3d-dea3-407d-aae3-2ac725bcab34") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.086478 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.086573 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.086642 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.086679 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.086704 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:49Z","lastTransitionTime":"2025-12-02T00:06:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.190246 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.190288 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.190298 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.190315 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.190326 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:49Z","lastTransitionTime":"2025-12-02T00:06:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.251556 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:06:49 crc kubenswrapper[4856]: E1202 00:06:49.251764 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.293067 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.293111 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.293127 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.293151 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.293167 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:49Z","lastTransitionTime":"2025-12-02T00:06:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.395037 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.395086 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.395098 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.395116 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.395128 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:49Z","lastTransitionTime":"2025-12-02T00:06:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.497244 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.497294 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.497309 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.497333 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.497349 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:49Z","lastTransitionTime":"2025-12-02T00:06:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.599864 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.599919 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.599939 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.599960 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.599973 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:49Z","lastTransitionTime":"2025-12-02T00:06:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.702374 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.702452 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.702466 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.702513 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.702528 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:49Z","lastTransitionTime":"2025-12-02T00:06:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.804646 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.804704 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.804722 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.804745 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.804763 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:49Z","lastTransitionTime":"2025-12-02T00:06:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.906560 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.906648 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.906666 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.906686 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:49 crc kubenswrapper[4856]: I1202 00:06:49.906698 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:49Z","lastTransitionTime":"2025-12-02T00:06:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.008896 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.008941 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.008958 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.008979 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.008994 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:50Z","lastTransitionTime":"2025-12-02T00:06:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.111401 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.111714 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.111817 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.111985 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.112099 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:50Z","lastTransitionTime":"2025-12-02T00:06:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.215324 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.215680 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.215817 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.215927 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.216025 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:50Z","lastTransitionTime":"2025-12-02T00:06:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.252098 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:06:50 crc kubenswrapper[4856]: E1202 00:06:50.252223 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.252099 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:06:50 crc kubenswrapper[4856]: E1202 00:06:50.252362 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.252719 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:06:50 crc kubenswrapper[4856]: E1202 00:06:50.252991 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.320018 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.320077 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.320101 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.320131 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.320154 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:50Z","lastTransitionTime":"2025-12-02T00:06:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.422934 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.423004 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.423024 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.423051 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.423073 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:50Z","lastTransitionTime":"2025-12-02T00:06:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.573135 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.573180 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.573191 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.573209 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.573224 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:50Z","lastTransitionTime":"2025-12-02T00:06:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.676075 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.676118 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.676127 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.676141 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.676151 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:50Z","lastTransitionTime":"2025-12-02T00:06:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.779405 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.779441 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.779449 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.779462 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.779472 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:50Z","lastTransitionTime":"2025-12-02T00:06:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.881364 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.881439 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.881449 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.881464 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.881506 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:50Z","lastTransitionTime":"2025-12-02T00:06:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.983715 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.983752 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.983762 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.983775 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:50 crc kubenswrapper[4856]: I1202 00:06:50.983784 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:50Z","lastTransitionTime":"2025-12-02T00:06:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.009527 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs\") pod \"network-metrics-daemon-4zvgr\" (UID: \"cbedea3d-dea3-407d-aae3-2ac725bcab34\") " pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:06:51 crc kubenswrapper[4856]: E1202 00:06:51.009667 4856 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 00:06:51 crc kubenswrapper[4856]: E1202 00:06:51.009721 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs podName:cbedea3d-dea3-407d-aae3-2ac725bcab34 nodeName:}" failed. No retries permitted until 2025-12-02 00:06:55.00970701 +0000 UTC m=+42.036075004 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs") pod "network-metrics-daemon-4zvgr" (UID: "cbedea3d-dea3-407d-aae3-2ac725bcab34") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.085999 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.086043 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.086060 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.086077 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.086088 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:51Z","lastTransitionTime":"2025-12-02T00:06:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.188584 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.188630 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.188646 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.188667 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.188679 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:51Z","lastTransitionTime":"2025-12-02T00:06:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.252925 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:06:51 crc kubenswrapper[4856]: E1202 00:06:51.253109 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.290573 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.290637 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.290646 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.290661 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.290680 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:51Z","lastTransitionTime":"2025-12-02T00:06:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.392835 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.392866 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.392874 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.392889 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.392900 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:51Z","lastTransitionTime":"2025-12-02T00:06:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.495820 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.495865 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.495878 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.495902 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.495914 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:51Z","lastTransitionTime":"2025-12-02T00:06:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.598733 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.598776 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.598787 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.598805 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.598820 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:51Z","lastTransitionTime":"2025-12-02T00:06:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.701323 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.701372 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.701386 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.701403 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.701417 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:51Z","lastTransitionTime":"2025-12-02T00:06:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.803160 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.803201 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.803215 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.803232 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.803244 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:51Z","lastTransitionTime":"2025-12-02T00:06:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.905809 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.905879 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.905901 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.905936 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:51 crc kubenswrapper[4856]: I1202 00:06:51.905956 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:51Z","lastTransitionTime":"2025-12-02T00:06:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.008866 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.008927 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.008945 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.008975 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.009032 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:52Z","lastTransitionTime":"2025-12-02T00:06:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.111298 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.111404 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.111414 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.111427 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.111435 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:52Z","lastTransitionTime":"2025-12-02T00:06:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.213944 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.213985 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.213995 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.214010 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.214019 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:52Z","lastTransitionTime":"2025-12-02T00:06:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.251785 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.251818 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.251856 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:06:52 crc kubenswrapper[4856]: E1202 00:06:52.251960 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:06:52 crc kubenswrapper[4856]: E1202 00:06:52.252061 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:06:52 crc kubenswrapper[4856]: E1202 00:06:52.252195 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.315738 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.315774 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.315785 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.315802 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.315813 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:52Z","lastTransitionTime":"2025-12-02T00:06:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.418340 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.418393 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.418405 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.418423 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.418434 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:52Z","lastTransitionTime":"2025-12-02T00:06:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.520751 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.520813 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.520836 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.520865 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.520885 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:52Z","lastTransitionTime":"2025-12-02T00:06:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.624221 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.624268 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.624279 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.624296 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.624305 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:52Z","lastTransitionTime":"2025-12-02T00:06:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.727350 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.727611 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.727623 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.727638 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.727650 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:52Z","lastTransitionTime":"2025-12-02T00:06:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.829564 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.829648 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.829661 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.829680 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.829695 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:52Z","lastTransitionTime":"2025-12-02T00:06:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.932131 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.932200 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.932216 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.932238 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:52 crc kubenswrapper[4856]: I1202 00:06:52.932255 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:52Z","lastTransitionTime":"2025-12-02T00:06:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.035192 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.035257 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.035270 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.035285 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.035297 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:53Z","lastTransitionTime":"2025-12-02T00:06:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.137656 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.137702 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.137713 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.137729 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.137739 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:53Z","lastTransitionTime":"2025-12-02T00:06:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.240876 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.240925 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.240942 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.240965 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.240982 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:53Z","lastTransitionTime":"2025-12-02T00:06:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.251494 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:06:53 crc kubenswrapper[4856]: E1202 00:06:53.251689 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.263184 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:53Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.288809 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce77f61de2ca77d9ff611b7d63090b9ce543a249b1ee7489d79b80378b67ad02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce77f61de2ca77d9ff611b7d63090b9ce543a249b1ee7489d79b80378b67ad02\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:06:46Z\\\",\\\"message\\\":\\\"false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.110:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f9232b32-e89f-4c8e-acc4-c6801b70dcb0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:NB_Global Row:map[] Rows:[] Columns:[] Mutations:[{Column:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 00:06:45.224698 6293 admin_network_policy_controller.go:133] Setting up event handlers for Admin Network Policy\\\\nI1202 00:06:45.225306 6293 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 00:06:45.225457 6293 ovnkube.go:599] Stopped ovnkube\\\\nI1202 00:06:45.225541 6293 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1202 00:06:45.225650 6293 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l5jg6_openshift-ovn-kubernetes(3051381c-49c8-4217-9831-013ca2931604)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:53Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.306254 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:53Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.326792 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:53Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.342868 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:53Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.344020 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.344059 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.344068 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.344080 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.344088 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:53Z","lastTransitionTime":"2025-12-02T00:06:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.355608 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:53Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.366525 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:53Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.375744 4856 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:53Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.387827 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b19beb5-329d-48ef-bce0-8e299b9a21c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://693c2f513a2b7d12bfa2c567c40bdc72fc5059614c9edf1b3b9784cf368612db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e7c9080ee52716f66a9635ece39df2447e9c43d749e8e284a43fb940bbdd8b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jzwq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:53Z is after 2025-08-24T17:21:41Z" Dec 02 
00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.398739 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4zvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cbedea3d-dea3-407d-aae3-2ac725bcab34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4zvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:53Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.424089 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3
fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:53Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.442238 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:53Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.446086 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.446158 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.446187 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.446209 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.446223 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:53Z","lastTransitionTime":"2025-12-02T00:06:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.456965 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:53Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.468767 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:53Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.482509 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:53Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.498300 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:53Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.510019 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:53Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.548196 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.548446 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.548544 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.548633 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.548706 4856 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:53Z","lastTransitionTime":"2025-12-02T00:06:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.651884 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.651925 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.651936 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.651952 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.651964 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:53Z","lastTransitionTime":"2025-12-02T00:06:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.753984 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.754043 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.754071 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.754094 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.754110 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:53Z","lastTransitionTime":"2025-12-02T00:06:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.856767 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.856843 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.856867 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.856897 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.856921 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:53Z","lastTransitionTime":"2025-12-02T00:06:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.959739 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.959776 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.959789 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.959827 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:53 crc kubenswrapper[4856]: I1202 00:06:53.959839 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:53Z","lastTransitionTime":"2025-12-02T00:06:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.062342 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.062400 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.062422 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.062450 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.062474 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:54Z","lastTransitionTime":"2025-12-02T00:06:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.165334 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.165396 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.165416 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.165441 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.165459 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:54Z","lastTransitionTime":"2025-12-02T00:06:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.251955 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.252012 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:06:54 crc kubenswrapper[4856]: E1202 00:06:54.252087 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:06:54 crc kubenswrapper[4856]: E1202 00:06:54.252154 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.252012 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:06:54 crc kubenswrapper[4856]: E1202 00:06:54.252385 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.268138 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.268171 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.268180 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.268222 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.268232 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:54Z","lastTransitionTime":"2025-12-02T00:06:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.370571 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.370933 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.371081 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.371228 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.371397 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:54Z","lastTransitionTime":"2025-12-02T00:06:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.474156 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.474544 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.474786 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.475122 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.475286 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:54Z","lastTransitionTime":"2025-12-02T00:06:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.578606 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.578649 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.578660 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.578675 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.578688 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:54Z","lastTransitionTime":"2025-12-02T00:06:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.681140 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.681227 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.681244 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.681268 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.681285 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:54Z","lastTransitionTime":"2025-12-02T00:06:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.783889 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.783931 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.783942 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.783959 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.783970 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:54Z","lastTransitionTime":"2025-12-02T00:06:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.887179 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.887234 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.887252 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.887277 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.887294 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:54Z","lastTransitionTime":"2025-12-02T00:06:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.989737 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.989775 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.989784 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.989798 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:54 crc kubenswrapper[4856]: I1202 00:06:54.989806 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:54Z","lastTransitionTime":"2025-12-02T00:06:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.048625 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs\") pod \"network-metrics-daemon-4zvgr\" (UID: \"cbedea3d-dea3-407d-aae3-2ac725bcab34\") " pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:06:55 crc kubenswrapper[4856]: E1202 00:06:55.048737 4856 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 00:06:55 crc kubenswrapper[4856]: E1202 00:06:55.048790 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs podName:cbedea3d-dea3-407d-aae3-2ac725bcab34 nodeName:}" failed. No retries permitted until 2025-12-02 00:07:03.048775305 +0000 UTC m=+50.075143309 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs") pod "network-metrics-daemon-4zvgr" (UID: "cbedea3d-dea3-407d-aae3-2ac725bcab34") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.092751 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.092837 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.092862 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.092955 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.093021 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:55Z","lastTransitionTime":"2025-12-02T00:06:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.196035 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.196087 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.196098 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.196113 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.196124 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:55Z","lastTransitionTime":"2025-12-02T00:06:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.241683 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.241738 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.241746 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.241759 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.241769 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:55Z","lastTransitionTime":"2025-12-02T00:06:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.314227 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:06:55 crc kubenswrapper[4856]: E1202 00:06:55.314385 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:06:55 crc kubenswrapper[4856]: E1202 00:06:55.324692 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:55Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.328607 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.328715 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.328786 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.328917 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.328999 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:55Z","lastTransitionTime":"2025-12-02T00:06:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:55 crc kubenswrapper[4856]: E1202 00:06:55.344159 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:55Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.354830 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.354920 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.354941 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.354969 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.354989 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:55Z","lastTransitionTime":"2025-12-02T00:06:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:55 crc kubenswrapper[4856]: E1202 00:06:55.372365 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:55Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.377117 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.377154 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.377165 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.377185 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.377198 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:55Z","lastTransitionTime":"2025-12-02T00:06:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:55 crc kubenswrapper[4856]: E1202 00:06:55.395051 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:55Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.399777 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.399823 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.399843 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.399874 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.399893 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:55Z","lastTransitionTime":"2025-12-02T00:06:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:55 crc kubenswrapper[4856]: E1202 00:06:55.419317 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:55Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:55 crc kubenswrapper[4856]: E1202 00:06:55.419573 4856 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.421390 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.421458 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.421483 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.421518 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.421541 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:55Z","lastTransitionTime":"2025-12-02T00:06:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.525281 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.525338 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.525351 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.525375 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.525388 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:55Z","lastTransitionTime":"2025-12-02T00:06:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.628742 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.628807 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.628823 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.628847 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.628867 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:55Z","lastTransitionTime":"2025-12-02T00:06:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.731741 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.731803 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.731825 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.731855 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.731878 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:55Z","lastTransitionTime":"2025-12-02T00:06:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.834775 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.834836 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.834854 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.834879 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.834898 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:55Z","lastTransitionTime":"2025-12-02T00:06:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.937773 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.937845 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.937865 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.937897 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:55 crc kubenswrapper[4856]: I1202 00:06:55.937917 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:55Z","lastTransitionTime":"2025-12-02T00:06:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.040350 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.040386 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.040394 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.040407 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.040415 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:56Z","lastTransitionTime":"2025-12-02T00:06:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.143503 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.143560 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.143577 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.143640 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.143663 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:56Z","lastTransitionTime":"2025-12-02T00:06:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.248755 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.248828 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.248842 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.248876 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.248902 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:56Z","lastTransitionTime":"2025-12-02T00:06:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.252095 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.252153 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:06:56 crc kubenswrapper[4856]: E1202 00:06:56.252236 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.252105 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:06:56 crc kubenswrapper[4856]: E1202 00:06:56.252356 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:06:56 crc kubenswrapper[4856]: E1202 00:06:56.252668 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.351775 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.351823 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.351835 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.351853 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.351866 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:56Z","lastTransitionTime":"2025-12-02T00:06:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.454327 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.454410 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.454438 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.454471 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.454496 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:56Z","lastTransitionTime":"2025-12-02T00:06:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.557440 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.557517 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.557541 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.557576 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.557666 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:56Z","lastTransitionTime":"2025-12-02T00:06:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.660629 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.660664 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.660674 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.660689 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.660699 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:56Z","lastTransitionTime":"2025-12-02T00:06:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.762688 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.762740 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.762748 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.762765 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.762774 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:56Z","lastTransitionTime":"2025-12-02T00:06:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.865287 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.865329 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.865341 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.865362 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.865374 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:56Z","lastTransitionTime":"2025-12-02T00:06:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.967503 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.967560 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.967575 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.967618 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:56 crc kubenswrapper[4856]: I1202 00:06:56.967636 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:56Z","lastTransitionTime":"2025-12-02T00:06:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.070435 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.070502 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.070523 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.070552 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.070572 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:57Z","lastTransitionTime":"2025-12-02T00:06:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.173880 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.173947 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.173971 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.173999 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.174017 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:57Z","lastTransitionTime":"2025-12-02T00:06:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.251800 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:06:57 crc kubenswrapper[4856]: E1202 00:06:57.252053 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.276352 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.276411 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.276431 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.276453 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.276476 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:57Z","lastTransitionTime":"2025-12-02T00:06:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.379224 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.379260 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.379271 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.379287 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.379298 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:57Z","lastTransitionTime":"2025-12-02T00:06:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.482252 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.482318 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.482343 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.482374 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.482395 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:57Z","lastTransitionTime":"2025-12-02T00:06:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.585116 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.585162 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.585175 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.585195 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.585208 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:57Z","lastTransitionTime":"2025-12-02T00:06:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.687791 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.687858 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.687875 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.687900 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.687917 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:57Z","lastTransitionTime":"2025-12-02T00:06:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.790039 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.790116 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.790139 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.790170 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.790192 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:57Z","lastTransitionTime":"2025-12-02T00:06:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.892831 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.892959 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.892981 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.893039 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.893058 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:57Z","lastTransitionTime":"2025-12-02T00:06:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.996215 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.996321 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.996344 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.996371 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:57 crc kubenswrapper[4856]: I1202 00:06:57.996391 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:57Z","lastTransitionTime":"2025-12-02T00:06:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.098838 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.098892 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.098913 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.098942 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.098967 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:58Z","lastTransitionTime":"2025-12-02T00:06:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.201476 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.201514 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.201523 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.201535 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.201544 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:58Z","lastTransitionTime":"2025-12-02T00:06:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.252261 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.252311 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:06:58 crc kubenswrapper[4856]: E1202 00:06:58.252416 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.252432 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:06:58 crc kubenswrapper[4856]: E1202 00:06:58.252690 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:06:58 crc kubenswrapper[4856]: E1202 00:06:58.252822 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.304914 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.304985 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.305010 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.305043 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.305064 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:58Z","lastTransitionTime":"2025-12-02T00:06:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.407856 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.407892 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.407902 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.407919 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.407931 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:58Z","lastTransitionTime":"2025-12-02T00:06:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.510232 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.510282 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.510298 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.510321 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.510339 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:58Z","lastTransitionTime":"2025-12-02T00:06:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.612544 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.612584 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.612607 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.612623 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.612634 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:58Z","lastTransitionTime":"2025-12-02T00:06:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.715104 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.715161 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.715177 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.715201 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.715217 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:58Z","lastTransitionTime":"2025-12-02T00:06:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.818446 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.818498 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.818508 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.818522 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.818533 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:58Z","lastTransitionTime":"2025-12-02T00:06:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.920913 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.920993 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.921019 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.921049 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:58 crc kubenswrapper[4856]: I1202 00:06:58.921106 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:58Z","lastTransitionTime":"2025-12-02T00:06:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.024529 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.024586 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.024643 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.024671 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.024695 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:59Z","lastTransitionTime":"2025-12-02T00:06:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.081945 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.094948 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.099568 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:59Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.115697 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:59Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.127154 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.127213 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.127233 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.127256 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.127272 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:59Z","lastTransitionTime":"2025-12-02T00:06:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.134440 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce77f61de2ca77d9ff611b7d63090b9ce543a249b1ee7489d79b80378b67ad02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce77f61de2ca77d9ff611b7d63090b9ce543a249b1ee7489d79b80378b67ad02\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:06:46Z\\\",\\\"message\\\":\\\"false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.110:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f9232b32-e89f-4c8e-acc4-c6801b70dcb0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:NB_Global Row:map[] Rows:[] Columns:[] Mutations:[{Column:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 00:06:45.224698 6293 admin_network_policy_controller.go:133] Setting up event handlers for Admin Network Policy\\\\nI1202 00:06:45.225306 6293 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 00:06:45.225457 6293 ovnkube.go:599] Stopped ovnkube\\\\nI1202 00:06:45.225541 6293 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1202 00:06:45.225650 6293 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-l5jg6_openshift-ovn-kubernetes(3051381c-49c8-4217-9831-013ca2931604)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:59Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.148345 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:59Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.158814 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:59Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.175108 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:59Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.192620 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:59Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.203898 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:59Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.214341 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b19beb5-329d-48ef-bce0-8e299b9a21c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://693c2f513a2b7d12bfa2c567c40bdc72fc5059614c9edf1b3b9784cf368612db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e7c9080ee52716f66a9635ece39df2447e9c43d749e8e284a43fb940bbdd8b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126
.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jzwq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:59Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.229748 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.229807 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.229822 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.229842 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.229855 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:59Z","lastTransitionTime":"2025-12-02T00:06:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.230239 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4zvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cbedea3d-dea3-407d-aae3-2ac725bcab34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4zvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:59Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.248693 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3
fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:59Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.251390 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:06:59 crc kubenswrapper[4856]: E1202 00:06:59.251543 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.264337 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:59Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.277521 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:59Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.293419 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:59Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.308256 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\
\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:59Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.321746 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:59Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.332438 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.332490 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.332501 4856 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.332520 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.332532 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:59Z","lastTransitionTime":"2025-12-02T00:06:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.334557 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:06:59Z is after 2025-08-24T17:21:41Z" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.435115 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.435169 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.435203 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:59 crc 
kubenswrapper[4856]: I1202 00:06:59.435224 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.435233 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:59Z","lastTransitionTime":"2025-12-02T00:06:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.538355 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.538414 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.538432 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.538457 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.538475 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:59Z","lastTransitionTime":"2025-12-02T00:06:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.641066 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.641130 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.641145 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.641167 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.641199 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:59Z","lastTransitionTime":"2025-12-02T00:06:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.744632 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.744930 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.745038 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.745159 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.745274 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:59Z","lastTransitionTime":"2025-12-02T00:06:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.848090 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.848134 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.848147 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.848205 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.848220 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:59Z","lastTransitionTime":"2025-12-02T00:06:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.951072 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.951146 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.951163 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.951191 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:06:59 crc kubenswrapper[4856]: I1202 00:06:59.951208 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:06:59Z","lastTransitionTime":"2025-12-02T00:06:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.053385 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.053421 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.053431 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.053445 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.053454 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:00Z","lastTransitionTime":"2025-12-02T00:07:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.155353 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.155408 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.155423 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.155444 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.155458 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:00Z","lastTransitionTime":"2025-12-02T00:07:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.251475 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.251505 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.251475 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:00 crc kubenswrapper[4856]: E1202 00:07:00.251708 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:00 crc kubenswrapper[4856]: E1202 00:07:00.251618 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:00 crc kubenswrapper[4856]: E1202 00:07:00.251800 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.258142 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.258182 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.258190 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.258224 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.258235 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:00Z","lastTransitionTime":"2025-12-02T00:07:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.360511 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.360549 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.360558 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.360573 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.360582 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:00Z","lastTransitionTime":"2025-12-02T00:07:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.463253 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.463324 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.463348 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.463375 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.463392 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:00Z","lastTransitionTime":"2025-12-02T00:07:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.565816 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.565861 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.565877 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.565898 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.565916 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:00Z","lastTransitionTime":"2025-12-02T00:07:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.668141 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.668221 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.668247 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.668276 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.668298 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:00Z","lastTransitionTime":"2025-12-02T00:07:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.770409 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.770498 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.770509 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.770527 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.770537 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:00Z","lastTransitionTime":"2025-12-02T00:07:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.872376 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.872409 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.872418 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.872431 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.872441 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:00Z","lastTransitionTime":"2025-12-02T00:07:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.974914 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.974956 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.974964 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.974977 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:00 crc kubenswrapper[4856]: I1202 00:07:00.974986 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:00Z","lastTransitionTime":"2025-12-02T00:07:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.077460 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.077503 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.077518 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.077539 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.077556 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:01Z","lastTransitionTime":"2025-12-02T00:07:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.180492 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.180534 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.180550 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.180565 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.180575 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:01Z","lastTransitionTime":"2025-12-02T00:07:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.252300 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:01 crc kubenswrapper[4856]: E1202 00:07:01.252492 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.283768 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.283797 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.283805 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.283817 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.283826 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:01Z","lastTransitionTime":"2025-12-02T00:07:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.386271 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.386317 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.386332 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.386354 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.386389 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:01Z","lastTransitionTime":"2025-12-02T00:07:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.489028 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.489121 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.489145 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.489219 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.489239 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:01Z","lastTransitionTime":"2025-12-02T00:07:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.592520 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.592633 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.592660 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.592690 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.592713 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:01Z","lastTransitionTime":"2025-12-02T00:07:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.696508 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.697009 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.697202 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.697792 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.698148 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:01Z","lastTransitionTime":"2025-12-02T00:07:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.801493 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.801869 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.802010 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.802136 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.802253 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:01Z","lastTransitionTime":"2025-12-02T00:07:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.905542 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.905632 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.905658 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.905694 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:01 crc kubenswrapper[4856]: I1202 00:07:01.905715 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:01Z","lastTransitionTime":"2025-12-02T00:07:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.008626 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.008666 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.008675 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.008692 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.008700 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:02Z","lastTransitionTime":"2025-12-02T00:07:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.111536 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.111577 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.111604 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.111620 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.111629 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:02Z","lastTransitionTime":"2025-12-02T00:07:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.214111 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.214185 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.214205 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.214233 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.214251 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:02Z","lastTransitionTime":"2025-12-02T00:07:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.252069 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.252272 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.252418 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:02 crc kubenswrapper[4856]: E1202 00:07:02.252264 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:02 crc kubenswrapper[4856]: E1202 00:07:02.252740 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:02 crc kubenswrapper[4856]: E1202 00:07:02.253840 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.255107 4856 scope.go:117] "RemoveContainer" containerID="ce77f61de2ca77d9ff611b7d63090b9ce543a249b1ee7489d79b80378b67ad02" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.316790 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.316819 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.316829 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.316842 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.316852 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:02Z","lastTransitionTime":"2025-12-02T00:07:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.419252 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.419293 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.419305 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.419322 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.419335 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:02Z","lastTransitionTime":"2025-12-02T00:07:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.522111 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.522168 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.522184 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.522204 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.522220 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:02Z","lastTransitionTime":"2025-12-02T00:07:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.567303 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l5jg6_3051381c-49c8-4217-9831-013ca2931604/ovnkube-controller/1.log" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.570326 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerStarted","Data":"bd3e69c1fbdd3aad231b68925ca080aa7ab6fb7f6d17060007dba00467942ebf"} Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.570784 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.589648 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a559
62e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:02Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.603762 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59312e00-584c-4b1d-9148-e0ec4c2dfab0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4969137cf60fccb75086b7aa6f66dca51d6db3e3b49deba1b5ecd3f35d967daf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://392490e6681da7d19655ef58c640b234085c1ff43d80cea0c78613cfa0c6a7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e416b694bbfa89cc45862a18980da1884ef31b8519981402d99caa02eb99a239\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:02Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.615554 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:02Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.624375 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.624411 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.624419 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.624435 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.624445 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:02Z","lastTransitionTime":"2025-12-02T00:07:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.634395 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3e69c1fbdd3aad231b68925ca080aa7ab6fb7f
6d17060007dba00467942ebf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce77f61de2ca77d9ff611b7d63090b9ce543a249b1ee7489d79b80378b67ad02\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:06:46Z\\\",\\\"message\\\":\\\"false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.110:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f9232b32-e89f-4c8e-acc4-c6801b70dcb0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:NB_Global Row:map[] Rows:[] Columns:[] Mutations:[{Column:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 00:06:45.224698 6293 admin_network_policy_controller.go:133] Setting up event handlers for Admin Network Policy\\\\nI1202 00:06:45.225306 6293 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 00:06:45.225457 6293 ovnkube.go:599] Stopped ovnkube\\\\nI1202 00:06:45.225541 6293 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1202 00:06:45.225650 6293 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:07:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:02Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.648756 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b19beb5-329d-48ef-bce0-8e299b9a21c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://693c2f513a2b7d12bfa2c567c40bdc72fc5059614c9edf1b3b9784cf368612db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e7c9080ee52716f66a9635ece39df2447e9c43d749e8e284a43fb940bbdd8b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jzwq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:02Z is after 2025-08-24T17:21:41Z" Dec 02 
00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.660167 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4zvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cbedea3d-dea3-407d-aae3-2ac725bcab34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4zvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:02Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.693985 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3
fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:02Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.712286 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:02Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.726778 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.726815 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.726826 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.726841 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.726852 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:02Z","lastTransitionTime":"2025-12-02T00:07:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.764285 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:02Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.776568 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:02Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.784342 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:02Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.790793 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:02Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.799942 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-12-02T00:07:02Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.808221 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:02Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.819121 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:02Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.828799 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.828820 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:02 crc 
kubenswrapper[4856]: I1202 00:07:02.828846 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.828863 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.828871 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:02Z","lastTransitionTime":"2025-12-02T00:07:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.832180 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"n
ame\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:02Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.851956 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserv
er-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use 
of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:02Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.864658 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:02Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.931680 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.931727 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.931737 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.931753 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:02 crc kubenswrapper[4856]: I1202 00:07:02.931762 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:02Z","lastTransitionTime":"2025-12-02T00:07:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.033344 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.033376 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.033386 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.033398 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.033410 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:03Z","lastTransitionTime":"2025-12-02T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.135908 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.135974 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.135996 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.136020 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.136041 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:03Z","lastTransitionTime":"2025-12-02T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.141659 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs\") pod \"network-metrics-daemon-4zvgr\" (UID: \"cbedea3d-dea3-407d-aae3-2ac725bcab34\") " pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:03 crc kubenswrapper[4856]: E1202 00:07:03.141848 4856 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 00:07:03 crc kubenswrapper[4856]: E1202 00:07:03.141955 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs podName:cbedea3d-dea3-407d-aae3-2ac725bcab34 nodeName:}" failed. No retries permitted until 2025-12-02 00:07:19.141929606 +0000 UTC m=+66.168297650 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs") pod "network-metrics-daemon-4zvgr" (UID: "cbedea3d-dea3-407d-aae3-2ac725bcab34") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.239234 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.239343 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.239365 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.239389 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.239407 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:03Z","lastTransitionTime":"2025-12-02T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.251738 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:03 crc kubenswrapper[4856]: E1202 00:07:03.251916 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.271166 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.285251 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.297530 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b19beb5-329d-48ef-bce0-8e299b9a21c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://693c2f513a2b7d12bfa2c567c40bdc72fc5059614c9edf1b3b9784cf368612db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e7c9080ee52716f66a9635ece39df2447e9c43d749e8e284a43fb940bbdd8b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jzwq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 
00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.312178 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4zvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cbedea3d-dea3-407d-aae3-2ac725bcab34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4zvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.333155 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3
fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.342404 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.342441 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.342455 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.342472 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.342483 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:03Z","lastTransitionTime":"2025-12-02T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.354222 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.371000 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.388887 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.404259 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.415868 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.432150 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 
crc kubenswrapper[4856]: I1202 00:07:03.444583 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.444639 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.444653 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.444672 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.444686 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:03Z","lastTransitionTime":"2025-12-02T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.455239 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/op
enshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\",\\\"reason
\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.478925 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"runnin
g\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.495229 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.509355 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.525175 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59312e00-584c-4b1d-9148-e0ec4c2dfab0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4969137cf60fccb75086b7aa6f66dca51d6db3e3b49deba1b5ecd3f35d967daf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://392490e6681da7d19655ef58c640b234085c1ff43d80cea0c78613cfa0c6a7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e416b694bbfa89cc45862a18980da1884ef31b8519981402d99caa02eb99a239\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.539782 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.546886 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.546957 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.546982 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.547014 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.547038 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:03Z","lastTransitionTime":"2025-12-02T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.562608 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3e69c1fbdd3aad231b68925ca080aa7ab6fb7f6d17060007dba00467942ebf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce77f61de2ca77d9ff611b7d63090b9ce543a249b1ee7489d79b80378b67ad02\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:06:46Z\\\",\\\"message\\\":\\\"false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.110:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f9232b32-e89f-4c8e-acc4-c6801b70dcb0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:NB_Global Row:map[] Rows:[] Columns:[] Mutations:[{Column:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 00:06:45.224698 6293 admin_network_policy_controller.go:133] Setting up event handlers for Admin Network Policy\\\\nI1202 00:06:45.225306 6293 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 00:06:45.225457 6293 ovnkube.go:599] Stopped ovnkube\\\\nI1202 00:06:45.225541 6293 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1202 00:06:45.225650 6293 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:07:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.579115 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l5jg6_3051381c-49c8-4217-9831-013ca2931604/ovnkube-controller/2.log" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.579583 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l5jg6_3051381c-49c8-4217-9831-013ca2931604/ovnkube-controller/1.log" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.582843 4856 generic.go:334] "Generic (PLEG): container finished" podID="3051381c-49c8-4217-9831-013ca2931604" containerID="bd3e69c1fbdd3aad231b68925ca080aa7ab6fb7f6d17060007dba00467942ebf" exitCode=1 Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.582878 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerDied","Data":"bd3e69c1fbdd3aad231b68925ca080aa7ab6fb7f6d17060007dba00467942ebf"} Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.582932 4856 scope.go:117] "RemoveContainer" containerID="ce77f61de2ca77d9ff611b7d63090b9ce543a249b1ee7489d79b80378b67ad02" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.584049 4856 scope.go:117] "RemoveContainer" containerID="bd3e69c1fbdd3aad231b68925ca080aa7ab6fb7f6d17060007dba00467942ebf" Dec 02 00:07:03 crc kubenswrapper[4856]: E1202 00:07:03.584287 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-l5jg6_openshift-ovn-kubernetes(3051381c-49c8-4217-9831-013ca2931604)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" podUID="3051381c-49c8-4217-9831-013ca2931604" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.607478 4856 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202
b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.620278 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.633928 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.645667 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59312e00-584c-4b1d-9148-e0ec4c2dfab0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4969137cf60fccb75086b7aa6f66dca51d6db3e3b49deba1b5ecd3f35d967daf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://392490e6681da7d19655ef58c640b234085c1ff43d80cea0c78613cfa0c6a7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e416b694bbfa89cc45862a18980da1884ef31b8519981402d99caa02eb99a239\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.649261 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.649303 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.649317 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.649335 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 
00:07:03.649347 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:03Z","lastTransitionTime":"2025-12-02T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.657226 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.675345 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3e69c1fbdd3aad231b68925ca080aa7ab6fb7f6d17060007dba00467942ebf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce77f61de2ca77d9ff611b7d63090b9ce543a249b1ee7489d79b80378b67ad02\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:06:46Z\\\",\\\"message\\\":\\\"false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.110:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f9232b32-e89f-4c8e-acc4-c6801b70dcb0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:NB_Global Row:map[] Rows:[] Columns:[] Mutations:[{Column:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 00:06:45.224698 6293 admin_network_policy_controller.go:133] Setting up event handlers for Admin Network Policy\\\\nI1202 00:06:45.225306 6293 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 00:06:45.225457 6293 ovnkube.go:599] Stopped ovnkube\\\\nI1202 00:06:45.225541 6293 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1202 00:06:45.225650 6293 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd3e69c1fbdd3aad231b68925ca080aa7ab6fb7f6d17060007dba00467942ebf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:07:03Z\\\",\\\"message\\\":\\\"nshift-operator-lifecycle-manager/packageserver-service_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.153:5443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {5e50827b-d271-442b-b8a7-7f33b2cd6b11}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 00:07:03.167930 6498 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI1202 00:07:03.167936 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI1202 00:07:03.167940 6498 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1202 00:07:03.167263 6498 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1202 00:07:03.167948 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1202 00:07:03.167951 6498 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI1202 00:07:03.167937 6498 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer 
Row:map[external_ids:{GoMap:map[k8s.ovn.org/kin\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:07:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-de
v/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.692665 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd
45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"qu
ay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.706757 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.719087 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.732065 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.743800 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.751903 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.751938 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.751951 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.751970 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.751981 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:03Z","lastTransitionTime":"2025-12-02T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.756417 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.769360 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b19beb5-329d-48ef-bce0-8e299b9a21c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://693c2f513a2b7d12bfa2c567c40bdc72fc5059614c9edf1b3b9784cf368612db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e7c9080ee52716f66a9635ece39df2447e9c43d749e8e284a43fb940bbdd8b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jzwq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 
00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.780788 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4zvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cbedea3d-dea3-407d-aae3-2ac725bcab34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4zvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.792389 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.806528 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.827106 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.842946 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:03Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.854119 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.854170 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.854182 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.854204 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.854217 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:03Z","lastTransitionTime":"2025-12-02T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.957573 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.957643 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.957657 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.957681 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:03 crc kubenswrapper[4856]: I1202 00:07:03.957696 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:03Z","lastTransitionTime":"2025-12-02T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.049658 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.049863 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:04 crc kubenswrapper[4856]: E1202 00:07:04.050134 4856 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 00:07:04 crc kubenswrapper[4856]: E1202 00:07:04.050233 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 00:07:36.050206644 +0000 UTC m=+83.076574678 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 00:07:04 crc kubenswrapper[4856]: E1202 00:07:04.050550 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:07:36.050535673 +0000 UTC m=+83.076903707 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.061464 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.061522 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.061544 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.061576 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.061633 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:04Z","lastTransitionTime":"2025-12-02T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.151019 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.151171 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.151229 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:04 crc kubenswrapper[4856]: E1202 00:07:04.151469 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 00:07:04 crc kubenswrapper[4856]: E1202 00:07:04.151506 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 00:07:04 crc kubenswrapper[4856]: E1202 00:07:04.151530 4856 projected.go:194] Error preparing data for projected 
volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:07:04 crc kubenswrapper[4856]: E1202 00:07:04.151674 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 00:07:36.151638303 +0000 UTC m=+83.178006347 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:07:04 crc kubenswrapper[4856]: E1202 00:07:04.152219 4856 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 00:07:04 crc kubenswrapper[4856]: E1202 00:07:04.152419 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 00:07:36.152343091 +0000 UTC m=+83.178711245 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 00:07:04 crc kubenswrapper[4856]: E1202 00:07:04.152583 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 00:07:04 crc kubenswrapper[4856]: E1202 00:07:04.152678 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 00:07:04 crc kubenswrapper[4856]: E1202 00:07:04.152705 4856 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:07:04 crc kubenswrapper[4856]: E1202 00:07:04.152773 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 00:07:36.152751742 +0000 UTC m=+83.179119996 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.165358 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.165415 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.165432 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.165455 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.165474 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:04Z","lastTransitionTime":"2025-12-02T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.252304 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.252323 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.252475 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:04 crc kubenswrapper[4856]: E1202 00:07:04.252574 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:04 crc kubenswrapper[4856]: E1202 00:07:04.252779 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:04 crc kubenswrapper[4856]: E1202 00:07:04.253026 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.268459 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.268523 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.268543 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.268573 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.268621 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:04Z","lastTransitionTime":"2025-12-02T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.371661 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.371902 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.371964 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.372060 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.372127 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:04Z","lastTransitionTime":"2025-12-02T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.475630 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.475922 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.475998 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.476065 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.476164 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:04Z","lastTransitionTime":"2025-12-02T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.579539 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.579672 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.579698 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.579736 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.579761 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:04Z","lastTransitionTime":"2025-12-02T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.590209 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l5jg6_3051381c-49c8-4217-9831-013ca2931604/ovnkube-controller/2.log" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.597193 4856 scope.go:117] "RemoveContainer" containerID="bd3e69c1fbdd3aad231b68925ca080aa7ab6fb7f6d17060007dba00467942ebf" Dec 02 00:07:04 crc kubenswrapper[4856]: E1202 00:07:04.597518 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-l5jg6_openshift-ovn-kubernetes(3051381c-49c8-4217-9831-013ca2931604)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" podUID="3051381c-49c8-4217-9831-013ca2931604" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.615709 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:04Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.630072 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 
00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:04Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.651277 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59312e00-584c-4b1d-9148-e0ec4c2dfab0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4969137cf60fccb75086b7aa6f66dca51d6db3e3b49deba1b5ecd3f35d967daf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://392490e6681da7d19655ef58c640b234085c1ff43d80cea0c78613cfa0c6a7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e416b694bbfa89cc45862a18980da1884ef31b8519981402d99caa02eb99a239\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:04Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.668123 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:04Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.682754 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.682824 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.682848 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.682883 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.682907 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:04Z","lastTransitionTime":"2025-12-02T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.692924 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3e69c1fbdd3aad231b68925ca080aa7ab6fb7f
6d17060007dba00467942ebf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd3e69c1fbdd3aad231b68925ca080aa7ab6fb7f6d17060007dba00467942ebf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:07:03Z\\\",\\\"message\\\":\\\"nshift-operator-lifecycle-manager/packageserver-service_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.153:5443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {5e50827b-d271-442b-b8a7-7f33b2cd6b11}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 00:07:03.167930 6498 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI1202 00:07:03.167936 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI1202 00:07:03.167940 6498 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1202 00:07:03.167263 6498 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1202 00:07:03.167948 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1202 00:07:03.167951 6498 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI1202 00:07:03.167937 6498 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kin\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:07:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l5jg6_openshift-ovn-kubernetes(3051381c-49c8-4217-9831-013ca2931604)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:04Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.709481 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:04Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.725553 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:04Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.739356 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:04Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.754685 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:04Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.768376 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:04Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.780333 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:04Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.785535 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.785577 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.785615 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.785635 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.785648 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:04Z","lastTransitionTime":"2025-12-02T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.792366 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b19beb5-329d-48ef-bce0-8e299b9a21c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://693c2f513a2b7d12bfa2c567c40bdc72fc5059614c9edf1b3b9784cf368612db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secret
s/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e7c9080ee52716f66a9635ece39df2447e9c43d749e8e284a43fb940bbdd8b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jzwq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:04Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.802578 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4zvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cbedea3d-dea3-407d-aae3-2ac725bcab34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4zvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:04Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.820631 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3
fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:04Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.833741 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:04Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.852082 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:04Z is after 2025-08-24T17:21:41Z" Dec 
02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.873418 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\
\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:04Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.889108 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:04Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.889478 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.889534 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.889552 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.889576 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.889624 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:04Z","lastTransitionTime":"2025-12-02T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.992301 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.992555 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.992725 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.992873 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:04 crc kubenswrapper[4856]: I1202 00:07:04.993002 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:04Z","lastTransitionTime":"2025-12-02T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.095564 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.095615 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.095627 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.095642 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.095653 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:05Z","lastTransitionTime":"2025-12-02T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.197662 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.197721 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.197740 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.197767 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.197785 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:05Z","lastTransitionTime":"2025-12-02T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.252280 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:05 crc kubenswrapper[4856]: E1202 00:07:05.252559 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.300823 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.300932 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.300956 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.300985 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.301009 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:05Z","lastTransitionTime":"2025-12-02T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.403686 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.403751 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.403768 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.403794 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.403812 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:05Z","lastTransitionTime":"2025-12-02T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.493242 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.493294 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.493316 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.493343 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.493363 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:05Z","lastTransitionTime":"2025-12-02T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:05 crc kubenswrapper[4856]: E1202 00:07:05.516555 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:05Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.522396 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.522464 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.522487 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.522517 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.522539 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:05Z","lastTransitionTime":"2025-12-02T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:05 crc kubenswrapper[4856]: E1202 00:07:05.542685 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:05Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.547063 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.547119 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.547144 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.547172 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.547194 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:05Z","lastTransitionTime":"2025-12-02T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:05 crc kubenswrapper[4856]: E1202 00:07:05.567075 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:05Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.571982 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.572033 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.572053 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.572081 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.572103 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:05Z","lastTransitionTime":"2025-12-02T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:05 crc kubenswrapper[4856]: E1202 00:07:05.592107 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:05Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.596481 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.596532 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.596552 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.596576 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.596626 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:05Z","lastTransitionTime":"2025-12-02T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:05 crc kubenswrapper[4856]: E1202 00:07:05.611014 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:05Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:05 crc kubenswrapper[4856]: E1202 00:07:05.611246 4856 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.612937 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.612993 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.613006 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.613021 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.613031 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:05Z","lastTransitionTime":"2025-12-02T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.715101 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.715142 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.715153 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.715179 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.715191 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:05Z","lastTransitionTime":"2025-12-02T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.818438 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.818488 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.818504 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.818578 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.818628 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:05Z","lastTransitionTime":"2025-12-02T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.920869 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.920924 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.920939 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.920961 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:05 crc kubenswrapper[4856]: I1202 00:07:05.920976 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:05Z","lastTransitionTime":"2025-12-02T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.023762 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.023790 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.023797 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.023811 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.023820 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:06Z","lastTransitionTime":"2025-12-02T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.126881 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.126922 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.126930 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.126947 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.126957 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:06Z","lastTransitionTime":"2025-12-02T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.230387 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.230421 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.230431 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.230446 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.230455 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:06Z","lastTransitionTime":"2025-12-02T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.251901 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.251942 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:06 crc kubenswrapper[4856]: E1202 00:07:06.252380 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.252431 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:06 crc kubenswrapper[4856]: E1202 00:07:06.252581 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:06 crc kubenswrapper[4856]: E1202 00:07:06.252692 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.333201 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.333307 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.333319 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.333333 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.333341 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:06Z","lastTransitionTime":"2025-12-02T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.435474 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.435543 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.435567 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.435628 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.435656 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:06Z","lastTransitionTime":"2025-12-02T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.538512 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.538572 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.538623 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.538654 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.538672 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:06Z","lastTransitionTime":"2025-12-02T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.641548 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.641610 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.641622 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.641636 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.641645 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:06Z","lastTransitionTime":"2025-12-02T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.743571 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.743638 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.743647 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.743663 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.743676 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:06Z","lastTransitionTime":"2025-12-02T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.845911 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.845954 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.845965 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.845983 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.845995 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:06Z","lastTransitionTime":"2025-12-02T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.948698 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.948731 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.948740 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.948756 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:06 crc kubenswrapper[4856]: I1202 00:07:06.948767 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:06Z","lastTransitionTime":"2025-12-02T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.050925 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.050962 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.050975 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.050992 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.051002 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:07Z","lastTransitionTime":"2025-12-02T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.152993 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.153031 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.153044 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.153060 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.153071 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:07Z","lastTransitionTime":"2025-12-02T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.251899 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:07 crc kubenswrapper[4856]: E1202 00:07:07.252101 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.256908 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.256950 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.256961 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.256977 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.256990 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:07Z","lastTransitionTime":"2025-12-02T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.358853 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.358880 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.358888 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.358902 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.358913 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:07Z","lastTransitionTime":"2025-12-02T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.461323 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.461359 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.461370 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.461384 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.461396 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:07Z","lastTransitionTime":"2025-12-02T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.563844 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.563888 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.563895 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.563910 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.563919 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:07Z","lastTransitionTime":"2025-12-02T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.666622 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.666674 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.666685 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.666705 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.666717 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:07Z","lastTransitionTime":"2025-12-02T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.769120 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.769169 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.769178 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.769195 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.769219 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:07Z","lastTransitionTime":"2025-12-02T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.871231 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.871295 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.871307 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.871326 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.871338 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:07Z","lastTransitionTime":"2025-12-02T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.975374 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.975409 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.975417 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.975431 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:07 crc kubenswrapper[4856]: I1202 00:07:07.975440 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:07Z","lastTransitionTime":"2025-12-02T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.077094 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.077129 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.077138 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.077153 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.077162 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:08Z","lastTransitionTime":"2025-12-02T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.179578 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.179646 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.179660 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.179680 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.179696 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:08Z","lastTransitionTime":"2025-12-02T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.251483 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.251537 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:08 crc kubenswrapper[4856]: E1202 00:07:08.251606 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.251621 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:08 crc kubenswrapper[4856]: E1202 00:07:08.251715 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:08 crc kubenswrapper[4856]: E1202 00:07:08.251767 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.282271 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.282306 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.282317 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.282331 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.282344 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:08Z","lastTransitionTime":"2025-12-02T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.385041 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.385065 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.385073 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.385086 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.385095 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:08Z","lastTransitionTime":"2025-12-02T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.487975 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.488031 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.488054 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.488090 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.488126 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:08Z","lastTransitionTime":"2025-12-02T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.595012 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.595050 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.595127 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.595147 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.595199 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:08Z","lastTransitionTime":"2025-12-02T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.698095 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.698125 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.698135 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.698151 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.698162 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:08Z","lastTransitionTime":"2025-12-02T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.800785 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.800828 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.800838 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.800853 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.800863 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:08Z","lastTransitionTime":"2025-12-02T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.902973 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.903015 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.903027 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.903043 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:08 crc kubenswrapper[4856]: I1202 00:07:08.903053 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:08Z","lastTransitionTime":"2025-12-02T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.005067 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.005105 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.005113 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.005127 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.005137 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:09Z","lastTransitionTime":"2025-12-02T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.107260 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.107294 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.107302 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.107316 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.107325 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:09Z","lastTransitionTime":"2025-12-02T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.210070 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.210104 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.210112 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.210126 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.210135 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:09Z","lastTransitionTime":"2025-12-02T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.251732 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:09 crc kubenswrapper[4856]: E1202 00:07:09.251914 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.312043 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.312092 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.312105 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.312123 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.312136 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:09Z","lastTransitionTime":"2025-12-02T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.414314 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.414357 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.414368 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.414385 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.414399 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:09Z","lastTransitionTime":"2025-12-02T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.516274 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.516315 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.516326 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.516343 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.516353 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:09Z","lastTransitionTime":"2025-12-02T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.618050 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.618116 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.618124 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.618137 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.618145 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:09Z","lastTransitionTime":"2025-12-02T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.721116 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.721168 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.721184 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.721207 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.721223 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:09Z","lastTransitionTime":"2025-12-02T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.823699 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.823793 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.823819 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.823855 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.823883 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:09Z","lastTransitionTime":"2025-12-02T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.926424 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.926458 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.926467 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.926481 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:09 crc kubenswrapper[4856]: I1202 00:07:09.926491 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:09Z","lastTransitionTime":"2025-12-02T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.029317 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.029379 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.029398 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.029423 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.029441 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:10Z","lastTransitionTime":"2025-12-02T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.132491 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.132550 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.132568 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.132616 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.132634 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:10Z","lastTransitionTime":"2025-12-02T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.234716 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.234771 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.234790 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.234815 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.234833 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:10Z","lastTransitionTime":"2025-12-02T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.252276 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.252318 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:10 crc kubenswrapper[4856]: E1202 00:07:10.252525 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.252551 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:10 crc kubenswrapper[4856]: E1202 00:07:10.253080 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:10 crc kubenswrapper[4856]: E1202 00:07:10.252979 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.338143 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.338252 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.338274 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.338299 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.338317 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:10Z","lastTransitionTime":"2025-12-02T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.440962 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.441001 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.441011 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.441027 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.441039 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:10Z","lastTransitionTime":"2025-12-02T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.543021 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.543052 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.543061 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.543075 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.543086 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:10Z","lastTransitionTime":"2025-12-02T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.645803 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.645865 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.645882 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.645907 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.645928 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:10Z","lastTransitionTime":"2025-12-02T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.748435 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.748483 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.748494 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.748510 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.748519 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:10Z","lastTransitionTime":"2025-12-02T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.851726 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.851771 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.851783 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.851803 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.851815 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:10Z","lastTransitionTime":"2025-12-02T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.954937 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.954983 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.954996 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.955015 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:10 crc kubenswrapper[4856]: I1202 00:07:10.955026 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:10Z","lastTransitionTime":"2025-12-02T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.056810 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.056873 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.056891 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.056915 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.056940 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:11Z","lastTransitionTime":"2025-12-02T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.159416 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.159485 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.159503 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.159525 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.159544 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:11Z","lastTransitionTime":"2025-12-02T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.251836 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:11 crc kubenswrapper[4856]: E1202 00:07:11.252582 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.260821 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.260861 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.260869 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.260885 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.260895 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:11Z","lastTransitionTime":"2025-12-02T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.363859 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.363925 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.363941 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.363970 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.363986 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:11Z","lastTransitionTime":"2025-12-02T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.466773 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.466839 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.466856 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.466883 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.466908 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:11Z","lastTransitionTime":"2025-12-02T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.569893 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.569947 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.569964 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.569987 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.570005 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:11Z","lastTransitionTime":"2025-12-02T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.672573 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.672642 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.672654 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.672674 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.672687 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:11Z","lastTransitionTime":"2025-12-02T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.775309 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.775365 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.775376 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.775398 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.775412 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:11Z","lastTransitionTime":"2025-12-02T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.878703 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.878782 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.878805 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.878835 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.878858 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:11Z","lastTransitionTime":"2025-12-02T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.982088 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.982178 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.982622 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.982657 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:11 crc kubenswrapper[4856]: I1202 00:07:11.982675 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:11Z","lastTransitionTime":"2025-12-02T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.086702 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.086753 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.086763 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.086780 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.086792 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:12Z","lastTransitionTime":"2025-12-02T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.189378 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.189519 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.189549 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.189571 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.189584 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:12Z","lastTransitionTime":"2025-12-02T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.251706 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.251780 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.251706 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:12 crc kubenswrapper[4856]: E1202 00:07:12.251854 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:12 crc kubenswrapper[4856]: E1202 00:07:12.251920 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:12 crc kubenswrapper[4856]: E1202 00:07:12.252051 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.292319 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.292372 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.292388 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.292413 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.292429 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:12Z","lastTransitionTime":"2025-12-02T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.395265 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.395350 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.395373 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.395407 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.395430 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:12Z","lastTransitionTime":"2025-12-02T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.497936 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.497964 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.497972 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.498003 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.498013 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:12Z","lastTransitionTime":"2025-12-02T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.600311 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.600344 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.600355 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.600370 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.600380 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:12Z","lastTransitionTime":"2025-12-02T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.703731 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.703771 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.703785 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.703807 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.703824 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:12Z","lastTransitionTime":"2025-12-02T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.806609 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.806922 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.807002 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.807067 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.807133 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:12Z","lastTransitionTime":"2025-12-02T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.909773 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.910183 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.910397 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.910650 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:12 crc kubenswrapper[4856]: I1202 00:07:12.910822 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:12Z","lastTransitionTime":"2025-12-02T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.013876 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.013924 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.013936 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.013953 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.013966 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:13Z","lastTransitionTime":"2025-12-02T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.116257 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.116656 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.116833 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.117000 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.117164 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:13Z","lastTransitionTime":"2025-12-02T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.219992 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.220049 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.220066 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.220089 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.220106 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:13Z","lastTransitionTime":"2025-12-02T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.252045 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:13 crc kubenswrapper[4856]: E1202 00:07:13.252151 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.268512 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:13Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.285381 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:13Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.297835 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:13Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.311951 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:13Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.321941 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.321983 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.321995 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.322014 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.322028 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:13Z","lastTransitionTime":"2025-12-02T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.329905 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b19beb5-329d-48ef-bce0-8e299b9a21c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://693c2f513a2b7d12bfa2c567c40bdc72fc5059614c9edf1b3b9784cf368612db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secret
s/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e7c9080ee52716f66a9635ece39df2447e9c43d749e8e284a43fb940bbdd8b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jzwq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:13Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.346201 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4zvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cbedea3d-dea3-407d-aae3-2ac725bcab34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4zvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:13Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.370231 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3
fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:13Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.389768 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:13Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.407292 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:13Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.424877 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.424939 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.424959 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.424991 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.425013 4856 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:13Z","lastTransitionTime":"2025-12-02T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.429720 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"co
ntainerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:13Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.446433 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly
\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:13Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.462830 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" 
for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:13Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.477371 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"last
State\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:13Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.493283 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:13Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.504095 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:13Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.526982 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3e69c1fbdd3aad231b68925ca080aa7ab6fb7f6d17060007dba00467942ebf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd3e69c1fbdd3aad231b68925ca080aa7ab6fb7f6d17060007dba00467942ebf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:07:03Z\\\",\\\"message\\\":\\\"nshift-operator-lifecycle-manager/packageserver-service_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.153:5443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {5e50827b-d271-442b-b8a7-7f33b2cd6b11}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 00:07:03.167930 6498 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI1202 00:07:03.167936 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI1202 00:07:03.167940 6498 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1202 00:07:03.167263 6498 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1202 00:07:03.167948 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1202 00:07:03.167951 6498 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI1202 00:07:03.167937 6498 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kin\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:07:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l5jg6_openshift-ovn-kubernetes(3051381c-49c8-4217-9831-013ca2931604)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:13Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.527671 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.527709 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.527728 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.527751 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.527769 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:13Z","lastTransitionTime":"2025-12-02T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.542391 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:13Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.552920 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59312e00-584c-4b1d-9148-e0ec4c2dfab0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4969137cf60fccb75086b7aa6f66dca51d6db3e3b49deba1b5ecd3f35d967daf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://392490e6681da7d19655ef58c640b234085c1ff43d80cea0c78613cfa0c6a7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e416b694bbfa89cc45862a18980da1884ef31b8519981402d99caa02eb99a239\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:13Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.629084 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.629125 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.629137 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.629154 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.629167 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:13Z","lastTransitionTime":"2025-12-02T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.732642 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.733198 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.733283 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.733360 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.733433 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:13Z","lastTransitionTime":"2025-12-02T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.837320 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.837667 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.837680 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.837699 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.837711 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:13Z","lastTransitionTime":"2025-12-02T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.940321 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.940381 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.940408 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.940439 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:13 crc kubenswrapper[4856]: I1202 00:07:13.940463 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:13Z","lastTransitionTime":"2025-12-02T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.043763 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.043813 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.043831 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.043852 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.043867 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:14Z","lastTransitionTime":"2025-12-02T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.146288 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.146329 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.146340 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.146358 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.146369 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:14Z","lastTransitionTime":"2025-12-02T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.250012 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.250071 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.250096 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.250124 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.250145 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:14Z","lastTransitionTime":"2025-12-02T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.251287 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.251303 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:14 crc kubenswrapper[4856]: E1202 00:07:14.251396 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.251290 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:14 crc kubenswrapper[4856]: E1202 00:07:14.251566 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:14 crc kubenswrapper[4856]: E1202 00:07:14.251558 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.352556 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.352604 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.352616 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.352631 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.352643 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:14Z","lastTransitionTime":"2025-12-02T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.454232 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.454262 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.454270 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.454283 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.454292 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:14Z","lastTransitionTime":"2025-12-02T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.556818 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.556868 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.556884 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.556902 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.556915 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:14Z","lastTransitionTime":"2025-12-02T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.659439 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.659490 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.659500 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.659517 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.659531 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:14Z","lastTransitionTime":"2025-12-02T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.762332 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.762403 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.762426 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.762458 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.762478 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:14Z","lastTransitionTime":"2025-12-02T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.864904 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.864954 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.864975 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.865000 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.865019 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:14Z","lastTransitionTime":"2025-12-02T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.967752 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.967826 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.967848 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.967880 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:14 crc kubenswrapper[4856]: I1202 00:07:14.967904 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:14Z","lastTransitionTime":"2025-12-02T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.070645 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.070691 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.070702 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.070721 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.070733 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:15Z","lastTransitionTime":"2025-12-02T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.172317 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.172355 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.172364 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.172379 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.172389 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:15Z","lastTransitionTime":"2025-12-02T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.251470 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:15 crc kubenswrapper[4856]: E1202 00:07:15.251684 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.274598 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.274630 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.274639 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.274653 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.274663 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:15Z","lastTransitionTime":"2025-12-02T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.376222 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.376262 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.376274 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.376290 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.376301 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:15Z","lastTransitionTime":"2025-12-02T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.478801 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.478833 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.478844 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.478856 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.478864 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:15Z","lastTransitionTime":"2025-12-02T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.581195 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.581242 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.581254 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.581275 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.581287 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:15Z","lastTransitionTime":"2025-12-02T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.683089 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.683122 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.683131 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.683149 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.683159 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:15Z","lastTransitionTime":"2025-12-02T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.784936 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.784989 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.785005 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.785030 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.785047 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:15Z","lastTransitionTime":"2025-12-02T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.830778 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.830809 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.830818 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.830832 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.830841 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:15Z","lastTransitionTime":"2025-12-02T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:15 crc kubenswrapper[4856]: E1202 00:07:15.848234 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:15Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.852471 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.852585 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.852654 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.852683 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.852745 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:15Z","lastTransitionTime":"2025-12-02T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:15 crc kubenswrapper[4856]: E1202 00:07:15.875877 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:15Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.882662 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.882720 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.882741 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.882772 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.882798 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:15Z","lastTransitionTime":"2025-12-02T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:15 crc kubenswrapper[4856]: E1202 00:07:15.904554 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:15Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.913235 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.913332 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.913357 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.913390 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.913412 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:15Z","lastTransitionTime":"2025-12-02T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:15 crc kubenswrapper[4856]: E1202 00:07:15.932236 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:15Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.936364 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.936403 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.936412 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.936427 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.936436 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:15Z","lastTransitionTime":"2025-12-02T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:15 crc kubenswrapper[4856]: E1202 00:07:15.946442 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:15Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:15 crc kubenswrapper[4856]: E1202 00:07:15.946581 4856 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.948352 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.948400 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.948416 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.948435 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:15 crc kubenswrapper[4856]: I1202 00:07:15.948448 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:15Z","lastTransitionTime":"2025-12-02T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.050429 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.050524 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.050536 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.050552 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.050562 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:16Z","lastTransitionTime":"2025-12-02T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.152882 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.152943 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.152954 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.152971 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.152982 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:16Z","lastTransitionTime":"2025-12-02T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.252153 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:16 crc kubenswrapper[4856]: E1202 00:07:16.252286 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.252172 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.252151 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:16 crc kubenswrapper[4856]: E1202 00:07:16.252367 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:16 crc kubenswrapper[4856]: E1202 00:07:16.252533 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.255726 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.255766 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.255781 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.255798 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.255810 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:16Z","lastTransitionTime":"2025-12-02T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.357953 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.358017 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.358034 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.358058 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.358077 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:16Z","lastTransitionTime":"2025-12-02T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.460552 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.461067 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.461147 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.461212 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.461273 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:16Z","lastTransitionTime":"2025-12-02T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.563353 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.563411 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.563426 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.563451 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.563469 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:16Z","lastTransitionTime":"2025-12-02T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.665235 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.665280 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.665289 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.665304 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.665316 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:16Z","lastTransitionTime":"2025-12-02T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.767656 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.767717 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.767734 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.767759 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.767778 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:16Z","lastTransitionTime":"2025-12-02T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.870052 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.870345 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.870578 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.870867 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.871098 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:16Z","lastTransitionTime":"2025-12-02T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.973783 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.974089 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.974260 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.974454 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:16 crc kubenswrapper[4856]: I1202 00:07:16.974700 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:16Z","lastTransitionTime":"2025-12-02T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.077286 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.077327 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.077337 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.077352 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.077362 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:17Z","lastTransitionTime":"2025-12-02T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.179869 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.180727 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.180914 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.181075 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.181215 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:17Z","lastTransitionTime":"2025-12-02T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.252313 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:17 crc kubenswrapper[4856]: E1202 00:07:17.252511 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.253146 4856 scope.go:117] "RemoveContainer" containerID="bd3e69c1fbdd3aad231b68925ca080aa7ab6fb7f6d17060007dba00467942ebf" Dec 02 00:07:17 crc kubenswrapper[4856]: E1202 00:07:17.253386 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-l5jg6_openshift-ovn-kubernetes(3051381c-49c8-4217-9831-013ca2931604)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" podUID="3051381c-49c8-4217-9831-013ca2931604" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.284477 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.284809 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.284960 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.285115 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.285247 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:17Z","lastTransitionTime":"2025-12-02T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.387889 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.387922 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.387932 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.387949 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.387961 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:17Z","lastTransitionTime":"2025-12-02T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.490768 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.490806 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.490819 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.490836 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.490848 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:17Z","lastTransitionTime":"2025-12-02T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.593672 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.593726 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.593741 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.593761 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.593778 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:17Z","lastTransitionTime":"2025-12-02T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.696981 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.697037 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.697049 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.697089 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.697102 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:17Z","lastTransitionTime":"2025-12-02T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.800331 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.800370 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.800380 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.800400 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.800417 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:17Z","lastTransitionTime":"2025-12-02T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.902247 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.902286 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.902297 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.902313 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:17 crc kubenswrapper[4856]: I1202 00:07:17.902324 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:17Z","lastTransitionTime":"2025-12-02T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.004875 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.004902 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.004911 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.004926 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.004935 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:18Z","lastTransitionTime":"2025-12-02T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.107313 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.107339 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.107347 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.107360 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.107368 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:18Z","lastTransitionTime":"2025-12-02T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.209292 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.209337 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.209346 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.209359 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.209367 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:18Z","lastTransitionTime":"2025-12-02T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.251282 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:18 crc kubenswrapper[4856]: E1202 00:07:18.251384 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.251528 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:18 crc kubenswrapper[4856]: E1202 00:07:18.251568 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.251677 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:18 crc kubenswrapper[4856]: E1202 00:07:18.251719 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.311711 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.311744 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.311753 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.311767 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.311776 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:18Z","lastTransitionTime":"2025-12-02T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.414352 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.414471 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.414494 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.414551 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.414573 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:18Z","lastTransitionTime":"2025-12-02T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.517362 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.517418 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.517431 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.517448 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.517459 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:18Z","lastTransitionTime":"2025-12-02T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.619194 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.619240 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.619251 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.619269 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.619283 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:18Z","lastTransitionTime":"2025-12-02T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.721370 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.721404 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.721415 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.721429 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.721439 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:18Z","lastTransitionTime":"2025-12-02T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.823351 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.823381 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.823389 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.823402 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.823410 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:18Z","lastTransitionTime":"2025-12-02T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.925479 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.925555 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.925565 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.925581 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:18 crc kubenswrapper[4856]: I1202 00:07:18.925608 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:18Z","lastTransitionTime":"2025-12-02T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.027385 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.027421 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.027429 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.027443 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.027451 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:19Z","lastTransitionTime":"2025-12-02T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.130043 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.130085 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.130094 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.130108 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.130118 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:19Z","lastTransitionTime":"2025-12-02T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.155670 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs\") pod \"network-metrics-daemon-4zvgr\" (UID: \"cbedea3d-dea3-407d-aae3-2ac725bcab34\") " pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:19 crc kubenswrapper[4856]: E1202 00:07:19.155874 4856 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 00:07:19 crc kubenswrapper[4856]: E1202 00:07:19.155950 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs podName:cbedea3d-dea3-407d-aae3-2ac725bcab34 nodeName:}" failed. No retries permitted until 2025-12-02 00:07:51.155932125 +0000 UTC m=+98.182300189 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs") pod "network-metrics-daemon-4zvgr" (UID: "cbedea3d-dea3-407d-aae3-2ac725bcab34") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.232268 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.232320 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.232332 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.232352 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.232364 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:19Z","lastTransitionTime":"2025-12-02T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.251957 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:19 crc kubenswrapper[4856]: E1202 00:07:19.252209 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.334980 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.335027 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.335039 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.335060 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.335072 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:19Z","lastTransitionTime":"2025-12-02T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.437374 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.437425 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.437436 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.437453 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.437463 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:19Z","lastTransitionTime":"2025-12-02T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.539921 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.539960 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.539971 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.539986 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.539995 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:19Z","lastTransitionTime":"2025-12-02T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.640540 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5mfwj_536def47-c9d3-4c3e-9b4a-3776e034998b/kube-multus/0.log" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.640577 4856 generic.go:334] "Generic (PLEG): container finished" podID="536def47-c9d3-4c3e-9b4a-3776e034998b" containerID="def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129" exitCode=1 Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.640629 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5mfwj" event={"ID":"536def47-c9d3-4c3e-9b4a-3776e034998b","Type":"ContainerDied","Data":"def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129"} Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.640986 4856 scope.go:117] "RemoveContainer" containerID="def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.641644 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.641674 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.641683 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.641697 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.641706 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:19Z","lastTransitionTime":"2025-12-02T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.652799 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:19Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.664215 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:19Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.678560 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:19Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.690641 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:07:19Z\\\",\\\"message\\\":\\\"2025-12-02T00:06:34+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_46a99ace-6567-41a2-adf5-f22de7fc50f0\\\\n2025-12-02T00:06:34+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_46a99ace-6567-41a2-adf5-f22de7fc50f0 to /host/opt/cni/bin/\\\\n2025-12-02T00:06:34Z [verbose] multus-daemon started\\\\n2025-12-02T00:06:34Z [verbose] Readiness Indicator file check\\\\n2025-12-02T00:07:19Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:19Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.702145 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:19Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.712491 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:19Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.722983 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:19Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.732724 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59312e00-584c-4b1d-9148-e0ec4c2dfab0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4969137cf60fccb75086b7aa6f66dca51d6db3e3b49deba1b5ecd3f35d967daf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://392490e6681da7d19655ef58c640b234085c1ff43d80cea0c78613cfa0c6a7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e416b694bbfa89cc45862a18980da1884ef31b8519981402d99caa02eb99a239\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:19Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.742095 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:19Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.743453 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.743485 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.743501 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.743518 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.743530 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:19Z","lastTransitionTime":"2025-12-02T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.763664 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3e69c1fbdd3aad231b68925ca080aa7ab6fb7f6d17060007dba00467942ebf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd3e69c1fbdd3aad231b68925ca080aa7ab6fb7f6d17060007dba00467942ebf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:07:03Z\\\",\\\"message\\\":\\\"nshift-operator-lifecycle-manager/packageserver-service_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.153:5443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {5e50827b-d271-442b-b8a7-7f33b2cd6b11}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 00:07:03.167930 6498 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI1202 00:07:03.167936 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI1202 00:07:03.167940 6498 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1202 00:07:03.167263 6498 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1202 00:07:03.167948 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1202 00:07:03.167951 6498 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI1202 00:07:03.167937 6498 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer 
Row:map[external_ids:{GoMap:map[k8s.ovn.org/kin\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:07:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-l5jg6_openshift-ovn-kubernetes(3051381c-49c8-4217-9831-013ca2931604)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2
g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:19Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.776566 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b19beb5-329d-48ef-bce0-8e299b9a21c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://693c2f513a2b7d12bfa2c567c40bdc72fc5059614c9edf1b3b9784cf368612db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e7c9080ee52716f66a9635ece39df2447e9c43d749e8e284a43fb940bbdd8b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jzwq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:19Z is after 2025-08-24T17:21:41Z" Dec 02 
00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.785972 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4zvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cbedea3d-dea3-407d-aae3-2ac725bcab34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4zvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:19Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.803223 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3
fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:19Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.815572 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:19Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.828329 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:19Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.838980 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:19Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.845972 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.846270 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.846350 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.846436 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.846514 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:19Z","lastTransitionTime":"2025-12-02T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.849540 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:19Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.858812 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:19Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.949073 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.949277 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.949370 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.949436 4856 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:19 crc kubenswrapper[4856]: I1202 00:07:19.949495 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:19Z","lastTransitionTime":"2025-12-02T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.052158 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.052196 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.052205 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.052220 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.052230 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:20Z","lastTransitionTime":"2025-12-02T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.154786 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.154826 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.154839 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.154856 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.154868 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:20Z","lastTransitionTime":"2025-12-02T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.251461 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.251515 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.251547 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:20 crc kubenswrapper[4856]: E1202 00:07:20.251670 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:20 crc kubenswrapper[4856]: E1202 00:07:20.251726 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:20 crc kubenswrapper[4856]: E1202 00:07:20.251776 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.256988 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.257025 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.257034 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.257050 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.257059 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:20Z","lastTransitionTime":"2025-12-02T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.359104 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.359142 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.359152 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.359167 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.359178 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:20Z","lastTransitionTime":"2025-12-02T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.461525 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.461562 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.461572 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.461602 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.461612 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:20Z","lastTransitionTime":"2025-12-02T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.564064 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.564105 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.564114 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.564132 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.564141 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:20Z","lastTransitionTime":"2025-12-02T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.645054 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5mfwj_536def47-c9d3-4c3e-9b4a-3776e034998b/kube-multus/0.log" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.645118 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5mfwj" event={"ID":"536def47-c9d3-4c3e-9b4a-3776e034998b","Type":"ContainerStarted","Data":"7082ffe076a02439194564edcc80e68738d58df0d78fd91902ca555947068503"} Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.666607 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.666661 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.666673 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.666690 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.666699 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:20Z","lastTransitionTime":"2025-12-02T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.672260 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3
fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:20Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.695896 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:20Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.708539 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:20Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.727299 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:20Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.743966 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:20Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.761654 4856 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:20Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.769642 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.769681 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.769691 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.769710 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.769720 4856 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:20Z","lastTransitionTime":"2025-12-02T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.774833 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b19beb5-329d-48ef-bce0-8e299b9a21c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://693c2f513a2b7d12bfa2c567c40bdc72fc5059614c9edf1b3b9784cf368612db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e7c9080ee52716f66a9635ece39df2447e9c43d749e8e284a43fb940bbdd8b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"i
p\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jzwq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:20Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.790262 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4zvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cbedea3d-dea3-407d-aae3-2ac725bcab34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4zvgr\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:20Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.809280 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current 
time 2025-12-02T00:07:20Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.825319 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:20Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.842387 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:20Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.856033 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7082ffe076a02439194564edcc80e68738d58df0d78fd91902ca555947068503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:07:19Z\\\",\\\"message\\\":\\\"2025-12-02T00:06:34+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_46a99ace-6567-41a2-adf5-f22de7fc50f0\\\\n2025-12-02T00:06:34+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_46a99ace-6567-41a2-adf5-f22de7fc50f0 to /host/opt/cni/bin/\\\\n2025-12-02T00:06:34Z [verbose] multus-daemon started\\\\n2025-12-02T00:06:34Z [verbose] Readiness Indicator file check\\\\n2025-12-02T00:07:19Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:07:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:20Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.871456 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.871500 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.871515 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.871533 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.871544 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:20Z","lastTransitionTime":"2025-12-02T00:07:20Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.874019 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-
02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:20Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.886525 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:20Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.901110 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:20Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.913650 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59312e00-584c-4b1d-9148-e0ec4c2dfab0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4969137cf60fccb75086b7aa6f66dca51d6db3e3b49deba1b5ecd3f35d967daf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://392490e6681da7d19655ef58c640b234085c1ff43d80cea0c78613cfa0c6a7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e416b694bbfa89cc45862a18980da1884ef31b8519981402d99caa02eb99a239\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:20Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.924224 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:20Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.944045 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3e69c1fbdd3aad231b68925ca080aa7ab6fb7f6d17060007dba00467942ebf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd3e69c1fbdd3aad231b68925ca080aa7ab6fb7f6d17060007dba00467942ebf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:07:03Z\\\",\\\"message\\\":\\\"nshift-operator-lifecycle-manager/packageserver-service_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.153:5443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {5e50827b-d271-442b-b8a7-7f33b2cd6b11}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 00:07:03.167930 6498 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI1202 00:07:03.167936 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI1202 00:07:03.167940 6498 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1202 00:07:03.167263 6498 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1202 00:07:03.167948 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1202 00:07:03.167951 6498 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI1202 00:07:03.167937 6498 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kin\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:07:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l5jg6_openshift-ovn-kubernetes(3051381c-49c8-4217-9831-013ca2931604)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:20Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.974020 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.974050 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.974061 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.974075 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:20 crc kubenswrapper[4856]: I1202 00:07:20.974084 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:20Z","lastTransitionTime":"2025-12-02T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.076085 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.076142 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.076160 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.076187 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.076206 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:21Z","lastTransitionTime":"2025-12-02T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.178905 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.178995 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.179043 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.179072 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.179091 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:21Z","lastTransitionTime":"2025-12-02T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.251843 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:21 crc kubenswrapper[4856]: E1202 00:07:21.252118 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.281009 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.281087 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.281101 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.281117 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.281130 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:21Z","lastTransitionTime":"2025-12-02T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.384491 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.384552 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.384576 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.384647 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.384673 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:21Z","lastTransitionTime":"2025-12-02T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.487103 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.487166 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.487188 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.487218 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.487239 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:21Z","lastTransitionTime":"2025-12-02T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.589409 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.589450 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.589461 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.589478 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.589489 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:21Z","lastTransitionTime":"2025-12-02T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.691385 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.691424 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.691435 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.691449 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.691459 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:21Z","lastTransitionTime":"2025-12-02T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.793330 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.793361 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.793370 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.793386 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.793395 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:21Z","lastTransitionTime":"2025-12-02T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.897126 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.897158 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.897168 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.897182 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:21 crc kubenswrapper[4856]: I1202 00:07:21.897192 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:21Z","lastTransitionTime":"2025-12-02T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:21.999983 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.000065 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.000080 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.000097 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.000109 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:22Z","lastTransitionTime":"2025-12-02T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.106744 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.106780 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.106790 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.106806 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.106818 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:22Z","lastTransitionTime":"2025-12-02T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.208754 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.208784 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.208792 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.208806 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.208815 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:22Z","lastTransitionTime":"2025-12-02T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.251439 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.251515 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.251438 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:22 crc kubenswrapper[4856]: E1202 00:07:22.251576 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:22 crc kubenswrapper[4856]: E1202 00:07:22.251671 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:22 crc kubenswrapper[4856]: E1202 00:07:22.251755 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.311303 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.311342 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.311353 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.311368 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.311381 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:22Z","lastTransitionTime":"2025-12-02T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.413517 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.413540 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.413548 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.413561 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.413569 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:22Z","lastTransitionTime":"2025-12-02T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.515449 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.515481 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.515492 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.515511 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.515523 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:22Z","lastTransitionTime":"2025-12-02T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.617608 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.617661 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.617673 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.617689 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.617700 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:22Z","lastTransitionTime":"2025-12-02T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.719349 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.719440 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.719461 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.719484 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.719501 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:22Z","lastTransitionTime":"2025-12-02T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.821731 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.821766 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.821777 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.821792 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.821837 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:22Z","lastTransitionTime":"2025-12-02T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.923791 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.923822 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.923830 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.923842 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:22 crc kubenswrapper[4856]: I1202 00:07:22.923852 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:22Z","lastTransitionTime":"2025-12-02T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.026011 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.026043 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.026051 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.026066 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.026076 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:23Z","lastTransitionTime":"2025-12-02T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.127781 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.127832 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.127852 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.127874 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.127893 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:23Z","lastTransitionTime":"2025-12-02T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.230107 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.230157 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.230173 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.230196 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.230211 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:23Z","lastTransitionTime":"2025-12-02T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.251542 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:23 crc kubenswrapper[4856]: E1202 00:07:23.251736 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.270384 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:23Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.287641 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:23Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.305070 4856 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:23Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.316977 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b19beb5-329d-48ef-bce0-8e299b9a21c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://693c2f513a2b7d12bfa2c567c40bdc72fc5059614c9edf1b3b9784cf368612db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e7c9080ee52716f66a9635ece39df2447e9c43d749e8e284a43fb940bbdd8b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jzwq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:23Z is after 2025-08-24T17:21:41Z" Dec 02 
00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.328204 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4zvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cbedea3d-dea3-407d-aae3-2ac725bcab34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4zvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:23Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.332425 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.332475 4856 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.332491 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.332513 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.332529 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:23Z","lastTransitionTime":"2025-12-02T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.352467 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernet
es/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"r
eason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:23Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.367150 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:23Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.386859 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:23Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.404433 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,
\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disable
d\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name
\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:23Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.419178 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7082ffe076a02439194564edcc80e68738d58df0d78fd91902ca555947068503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:07:19Z\\\",\\\"message\\\":\\\"2025-12-02T00:06:34+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_46a99ace-6567-41a2-adf5-f22de7fc50f0\\\\n2025-12-02T00:06:34+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_46a99ace-6567-41a2-adf5-f22de7fc50f0 to /host/opt/cni/bin/\\\\n2025-12-02T00:06:34Z [verbose] multus-daemon started\\\\n2025-12-02T00:06:34Z [verbose] Readiness Indicator file check\\\\n2025-12-02T00:07:19Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:07:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:23Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.431205 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:23Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.434885 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.434938 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.434951 4856 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.434967 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.434978 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:23Z","lastTransitionTime":"2025-12-02T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.441788 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:23Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.454985 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:23Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.465972 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:23Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.480273 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3e69c1fbdd3aad231b68925ca080aa7ab6fb7f
6d17060007dba00467942ebf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd3e69c1fbdd3aad231b68925ca080aa7ab6fb7f6d17060007dba00467942ebf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:07:03Z\\\",\\\"message\\\":\\\"nshift-operator-lifecycle-manager/packageserver-service_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.153:5443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {5e50827b-d271-442b-b8a7-7f33b2cd6b11}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 00:07:03.167930 6498 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI1202 00:07:03.167936 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI1202 00:07:03.167940 6498 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1202 00:07:03.167263 6498 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1202 00:07:03.167948 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1202 00:07:03.167951 6498 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI1202 00:07:03.167937 6498 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kin\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:07:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l5jg6_openshift-ovn-kubernetes(3051381c-49c8-4217-9831-013ca2931604)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:23Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.489918 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:23Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.499753 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59312e00-584c-4b1d-9148-e0ec4c2dfab0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4969137cf60fccb75086b7aa6f66dca51d6db3e3b49deba1b5ecd3f35d967daf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://392490e6681da7d19655ef58c640b234085c1ff43d80cea0c78613cfa0c6a7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e416b694bbfa89cc45862a18980da1884ef31b8519981402d99caa02eb99a239\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:23Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.507753 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:23Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.538073 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.538118 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.538140 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.538168 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.538190 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:23Z","lastTransitionTime":"2025-12-02T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.640890 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.640943 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.640956 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.640974 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.640988 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:23Z","lastTransitionTime":"2025-12-02T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.743258 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.743313 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.743322 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.743338 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.743348 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:23Z","lastTransitionTime":"2025-12-02T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.846277 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.846335 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.846358 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.846389 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.846411 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:23Z","lastTransitionTime":"2025-12-02T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.949464 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.949530 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.949547 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.949571 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:23 crc kubenswrapper[4856]: I1202 00:07:23.949615 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:23Z","lastTransitionTime":"2025-12-02T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.052421 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.052457 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.052466 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.052480 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.052491 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:24Z","lastTransitionTime":"2025-12-02T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.154906 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.154967 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.154989 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.155017 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.155039 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:24Z","lastTransitionTime":"2025-12-02T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.251621 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.251839 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.251890 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:24 crc kubenswrapper[4856]: E1202 00:07:24.252037 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:24 crc kubenswrapper[4856]: E1202 00:07:24.252191 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:24 crc kubenswrapper[4856]: E1202 00:07:24.252336 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.258097 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.258159 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.258181 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.258204 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.258225 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:24Z","lastTransitionTime":"2025-12-02T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.264323 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.361096 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.361181 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.361205 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.361235 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.361256 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:24Z","lastTransitionTime":"2025-12-02T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.464101 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.464160 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.464177 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.464201 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.464218 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:24Z","lastTransitionTime":"2025-12-02T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.567038 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.567083 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.567092 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.567107 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.567116 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:24Z","lastTransitionTime":"2025-12-02T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.669371 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.669432 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.669452 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.669481 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.669498 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:24Z","lastTransitionTime":"2025-12-02T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.772168 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.772243 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.772262 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.772293 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.772317 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:24Z","lastTransitionTime":"2025-12-02T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.874294 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.874346 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.874356 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.874371 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.874381 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:24Z","lastTransitionTime":"2025-12-02T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.976991 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.977041 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.977050 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.977066 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:24 crc kubenswrapper[4856]: I1202 00:07:24.977077 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:24Z","lastTransitionTime":"2025-12-02T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.080201 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.080236 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.080246 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.080259 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.080269 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:25Z","lastTransitionTime":"2025-12-02T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.183356 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.183437 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.183451 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.183472 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.183485 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:25Z","lastTransitionTime":"2025-12-02T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.251328 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:25 crc kubenswrapper[4856]: E1202 00:07:25.251532 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.286462 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.286545 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.286564 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.286629 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.286675 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:25Z","lastTransitionTime":"2025-12-02T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.388576 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.388639 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.388649 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.388665 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.388673 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:25Z","lastTransitionTime":"2025-12-02T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.490719 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.490766 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.490776 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.490791 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.490802 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:25Z","lastTransitionTime":"2025-12-02T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.594178 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.594218 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.594227 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.594243 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.594252 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:25Z","lastTransitionTime":"2025-12-02T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.698024 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.698091 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.698112 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.698141 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.698162 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:25Z","lastTransitionTime":"2025-12-02T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.800213 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.800287 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.800305 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.800337 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.800357 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:25Z","lastTransitionTime":"2025-12-02T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.903760 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.903808 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.903819 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.903836 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:25 crc kubenswrapper[4856]: I1202 00:07:25.903845 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:25Z","lastTransitionTime":"2025-12-02T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.007012 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.007069 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.007079 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.007095 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.007108 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:26Z","lastTransitionTime":"2025-12-02T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.075988 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.076054 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.076071 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.076100 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.076118 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:26Z","lastTransitionTime":"2025-12-02T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:26 crc kubenswrapper[4856]: E1202 00:07:26.096628 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:26Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.103324 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.103396 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.103416 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.103447 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.103469 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:26Z","lastTransitionTime":"2025-12-02T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:26 crc kubenswrapper[4856]: E1202 00:07:26.123040 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:26Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.128150 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.128243 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.128271 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.128311 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.128342 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:26Z","lastTransitionTime":"2025-12-02T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:26 crc kubenswrapper[4856]: E1202 00:07:26.146924 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:26Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.152141 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.152186 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.152198 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.152224 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.152239 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:26Z","lastTransitionTime":"2025-12-02T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:26 crc kubenswrapper[4856]: E1202 00:07:26.171790 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:26Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.177182 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.177259 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.177282 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.177309 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.177327 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:26Z","lastTransitionTime":"2025-12-02T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:26 crc kubenswrapper[4856]: E1202 00:07:26.199187 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:26Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:26 crc kubenswrapper[4856]: E1202 00:07:26.199436 4856 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.201691 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.201745 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.201759 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.201778 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.201793 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:26Z","lastTransitionTime":"2025-12-02T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.251797 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.251898 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.251995 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:26 crc kubenswrapper[4856]: E1202 00:07:26.252092 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:26 crc kubenswrapper[4856]: E1202 00:07:26.252271 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:26 crc kubenswrapper[4856]: E1202 00:07:26.252365 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.304668 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.304732 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.304746 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.304773 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.304793 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:26Z","lastTransitionTime":"2025-12-02T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.408057 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.408154 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.408172 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.408208 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.408229 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:26Z","lastTransitionTime":"2025-12-02T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.511767 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.511851 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.511872 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.511901 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.511921 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:26Z","lastTransitionTime":"2025-12-02T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.614560 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.614656 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.614670 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.614686 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.614702 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:26Z","lastTransitionTime":"2025-12-02T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.718151 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.718260 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.718286 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.718323 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.718346 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:26Z","lastTransitionTime":"2025-12-02T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.825629 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.825685 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.825699 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.825721 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.825742 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:26Z","lastTransitionTime":"2025-12-02T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.928408 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.928469 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.928483 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.928506 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:26 crc kubenswrapper[4856]: I1202 00:07:26.928520 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:26Z","lastTransitionTime":"2025-12-02T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.031337 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.031391 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.031408 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.031431 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.031502 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:27Z","lastTransitionTime":"2025-12-02T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.134108 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.134332 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.134462 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.134663 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.134809 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:27Z","lastTransitionTime":"2025-12-02T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.236929 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.237139 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.237275 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.237419 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.237559 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:27Z","lastTransitionTime":"2025-12-02T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.251192 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:27 crc kubenswrapper[4856]: E1202 00:07:27.251509 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.340618 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.340898 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.341115 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.341351 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.341553 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:27Z","lastTransitionTime":"2025-12-02T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.444356 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.444414 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.444438 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.444468 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.444490 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:27Z","lastTransitionTime":"2025-12-02T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.547650 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.547707 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.547725 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.547753 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.547770 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:27Z","lastTransitionTime":"2025-12-02T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.652952 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.653003 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.653014 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.653034 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.653046 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:27Z","lastTransitionTime":"2025-12-02T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.756118 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.756193 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.756217 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.756248 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.756267 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:27Z","lastTransitionTime":"2025-12-02T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.859409 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.859439 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.859448 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.859461 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.859471 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:27Z","lastTransitionTime":"2025-12-02T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.962424 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.962468 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.962479 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.962501 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:27 crc kubenswrapper[4856]: I1202 00:07:27.962513 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:27Z","lastTransitionTime":"2025-12-02T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.065783 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.065846 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.065868 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.065892 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.065912 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:28Z","lastTransitionTime":"2025-12-02T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.169226 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.169288 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.169307 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.169334 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.169352 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:28Z","lastTransitionTime":"2025-12-02T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.251840 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.251958 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:28 crc kubenswrapper[4856]: E1202 00:07:28.252021 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:28 crc kubenswrapper[4856]: E1202 00:07:28.252148 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.252245 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:28 crc kubenswrapper[4856]: E1202 00:07:28.252478 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.271777 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.271811 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.271823 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.271836 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.271848 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:28Z","lastTransitionTime":"2025-12-02T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.374164 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.374230 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.374256 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.374288 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.374314 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:28Z","lastTransitionTime":"2025-12-02T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.477104 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.477145 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.477157 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.477173 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.477184 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:28Z","lastTransitionTime":"2025-12-02T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.579790 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.579849 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.579864 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.579886 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.579901 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:28Z","lastTransitionTime":"2025-12-02T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.682377 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.682460 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.682473 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.682489 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.682501 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:28Z","lastTransitionTime":"2025-12-02T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.785326 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.785392 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.785417 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.785450 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.785470 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:28Z","lastTransitionTime":"2025-12-02T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.888859 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.888920 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.888939 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.888961 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.888980 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:28Z","lastTransitionTime":"2025-12-02T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.992528 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.992675 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.992705 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.992738 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:28 crc kubenswrapper[4856]: I1202 00:07:28.992765 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:28Z","lastTransitionTime":"2025-12-02T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.095676 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.095734 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.095752 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.095777 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.095794 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:29Z","lastTransitionTime":"2025-12-02T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.198945 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.199022 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.199046 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.199077 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.199098 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:29Z","lastTransitionTime":"2025-12-02T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.252326 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:29 crc kubenswrapper[4856]: E1202 00:07:29.252528 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.303060 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.303103 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.303117 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.303135 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.303147 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:29Z","lastTransitionTime":"2025-12-02T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.405619 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.405677 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.405694 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.405721 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.405738 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:29Z","lastTransitionTime":"2025-12-02T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.513785 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.513871 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.513897 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.513936 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.513955 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:29Z","lastTransitionTime":"2025-12-02T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.617971 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.618069 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.618096 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.618132 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.618167 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:29Z","lastTransitionTime":"2025-12-02T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.720749 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.720808 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.720823 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.720844 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.720858 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:29Z","lastTransitionTime":"2025-12-02T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.823497 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.823543 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.823552 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.823567 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.823577 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:29Z","lastTransitionTime":"2025-12-02T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.926021 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.926054 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.926062 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.926077 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:29 crc kubenswrapper[4856]: I1202 00:07:29.926086 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:29Z","lastTransitionTime":"2025-12-02T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.028301 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.028345 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.028379 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.028417 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.028429 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:30Z","lastTransitionTime":"2025-12-02T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.130342 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.130405 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.130425 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.130451 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.130469 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:30Z","lastTransitionTime":"2025-12-02T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.233722 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.233759 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.233769 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.233785 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.233799 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:30Z","lastTransitionTime":"2025-12-02T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.251531 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.251566 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.251574 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:30 crc kubenswrapper[4856]: E1202 00:07:30.251721 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:30 crc kubenswrapper[4856]: E1202 00:07:30.251829 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:30 crc kubenswrapper[4856]: E1202 00:07:30.252072 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.336489 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.336560 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.336583 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.336655 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.336679 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:30Z","lastTransitionTime":"2025-12-02T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.440173 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.440211 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.440222 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.440239 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.440254 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:30Z","lastTransitionTime":"2025-12-02T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.543303 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.543343 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.543355 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.543370 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.543381 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:30Z","lastTransitionTime":"2025-12-02T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.645707 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.645790 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.645811 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.645832 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.645847 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:30Z","lastTransitionTime":"2025-12-02T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.747779 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.747849 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.747862 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.747878 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.747888 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:30Z","lastTransitionTime":"2025-12-02T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.850867 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.850951 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.850978 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.851016 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.851039 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:30Z","lastTransitionTime":"2025-12-02T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.955101 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.955167 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.955198 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.955233 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:30 crc kubenswrapper[4856]: I1202 00:07:30.955253 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:30Z","lastTransitionTime":"2025-12-02T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.059291 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.059363 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.059389 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.059424 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.059444 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:31Z","lastTransitionTime":"2025-12-02T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.162709 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.162762 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.162777 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.162800 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.162823 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:31Z","lastTransitionTime":"2025-12-02T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.252346 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:31 crc kubenswrapper[4856]: E1202 00:07:31.252564 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.266099 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.266180 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.266207 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.266237 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.266263 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:31Z","lastTransitionTime":"2025-12-02T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.369775 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.369859 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.369876 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.369901 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.369920 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:31Z","lastTransitionTime":"2025-12-02T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.473734 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.473816 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.473838 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.473872 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.473899 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:31Z","lastTransitionTime":"2025-12-02T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.576448 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.576543 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.576562 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.576618 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.576641 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:31Z","lastTransitionTime":"2025-12-02T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.680767 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.680883 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.680908 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.680942 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.680964 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:31Z","lastTransitionTime":"2025-12-02T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.785258 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.785308 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.785326 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.785355 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.785375 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:31Z","lastTransitionTime":"2025-12-02T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.888548 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.888670 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.888700 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.888734 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.888759 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:31Z","lastTransitionTime":"2025-12-02T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.991918 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.991995 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.992018 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.992046 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:31 crc kubenswrapper[4856]: I1202 00:07:31.992064 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:31Z","lastTransitionTime":"2025-12-02T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.096059 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.096117 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.096131 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.096151 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.096164 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:32Z","lastTransitionTime":"2025-12-02T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.198688 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.198729 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.198738 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.198753 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.198762 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:32Z","lastTransitionTime":"2025-12-02T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.252069 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.252172 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:32 crc kubenswrapper[4856]: E1202 00:07:32.252199 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.252197 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:32 crc kubenswrapper[4856]: E1202 00:07:32.252285 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:32 crc kubenswrapper[4856]: E1202 00:07:32.252431 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.253046 4856 scope.go:117] "RemoveContainer" containerID="bd3e69c1fbdd3aad231b68925ca080aa7ab6fb7f6d17060007dba00467942ebf" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.301002 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.301080 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.301104 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.301136 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.301158 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:32Z","lastTransitionTime":"2025-12-02T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.404195 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.404278 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.404297 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.404329 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.404355 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:32Z","lastTransitionTime":"2025-12-02T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.506914 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.506953 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.506962 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.506977 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.506986 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:32Z","lastTransitionTime":"2025-12-02T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.609852 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.609939 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.609958 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.609992 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.610010 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:32Z","lastTransitionTime":"2025-12-02T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.713109 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.713163 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.713182 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.713207 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.713223 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:32Z","lastTransitionTime":"2025-12-02T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.816781 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.816844 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.816859 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.816880 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.816897 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:32Z","lastTransitionTime":"2025-12-02T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.920073 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.920137 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.920156 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.920184 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:32 crc kubenswrapper[4856]: I1202 00:07:32.920204 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:32Z","lastTransitionTime":"2025-12-02T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.023454 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.023535 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.023557 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.023618 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.023641 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:33Z","lastTransitionTime":"2025-12-02T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.127085 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.127133 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.127145 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.127166 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.127178 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:33Z","lastTransitionTime":"2025-12-02T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.230098 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.230161 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.230178 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.230205 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.230222 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:33Z","lastTransitionTime":"2025-12-02T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.251734 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:33 crc kubenswrapper[4856]: E1202 00:07:33.251978 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.268756 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d42838da-fa62-4095-9c01-86471a6e4e06\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c76478795a82cd5f074d675122e144fd3f16335bc7da88c41620e586a2a7576b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc35a904c17ddb0753b827f46716d62c481e6882d6824fe898ad9a6a4449eabc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc35a904c17ddb0753b827f46716d62c481e6882d6824fe898ad9a6a4449eabc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kube
let\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.285417 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.301775 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.325043 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.332395 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.332447 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:33 crc 
kubenswrapper[4856]: I1202 00:07:33.332462 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.332490 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.332506 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:33Z","lastTransitionTime":"2025-12-02T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.341975 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7082ffe076a02439194564edcc80e68738d58df0d78fd91902ca555947068503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:07:19Z\\\",\\\"message\\\":\\\"2025-12-02T00:06:34+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_46a99ace-6567-41a2-adf5-f22de7fc50f0\\\\n2025-12-02T00:06:34+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_46a99ace-6567-41a2-adf5-f22de7fc50f0 to /host/opt/cni/bin/\\\\n2025-12-02T00:06:34Z [verbose] multus-daemon started\\\\n2025-12-02T00:06:34Z [verbose] Readiness Indicator file check\\\\n2025-12-02T00:07:19Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:07:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.360893 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.376149 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.389132 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.404654 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59312e00-584c-4b1d-9148-e0ec4c2dfab0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4969137cf60fccb75086b7aa6f66dca51d6db3e3b49deba1b5ecd3f35d967daf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://392490e6681da7d19655ef58c640b234085c1ff43d80cea0c78613cfa0c6a7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e416b694bbfa89cc45862a18980da1884ef31b8519981402d99caa02eb99a239\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.423849 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.434630 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.434666 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.434676 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.434691 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.434700 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:33Z","lastTransitionTime":"2025-12-02T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.446332 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3e69c1fbdd3aad231b68925ca080aa7ab6fb7f6d17060007dba00467942ebf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd3e69c1fbdd3aad231b68925ca080aa7ab6fb7f6d17060007dba00467942ebf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:07:03Z\\\",\\\"message\\\":\\\"nshift-operator-lifecycle-manager/packageserver-service_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.153:5443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {5e50827b-d271-442b-b8a7-7f33b2cd6b11}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 00:07:03.167930 6498 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI1202 00:07:03.167936 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI1202 00:07:03.167940 6498 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1202 00:07:03.167263 6498 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1202 00:07:03.167948 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1202 00:07:03.167951 6498 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI1202 00:07:03.167937 6498 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer 
Row:map[external_ids:{GoMap:map[k8s.ovn.org/kin\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:07:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-l5jg6_openshift-ovn-kubernetes(3051381c-49c8-4217-9831-013ca2931604)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2
g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.460433 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b19beb5-329d-48ef-bce0-8e299b9a21c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://693c2f513a2b7d12bfa2c567c40bdc72fc5059614c9edf1b3b9784cf368612db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e7c9080ee52716f66a9635ece39df2447e9c43d749e8e284a43fb940bbdd8b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jzwq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 
00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.472164 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4zvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cbedea3d-dea3-407d-aae3-2ac725bcab34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4zvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.504731 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3
fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.516552 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.529618 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.536966 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.537001 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.537011 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.537026 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.537039 4856 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:33Z","lastTransitionTime":"2025-12-02T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.539740 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.551037 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.559644 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.638893 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.639127 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.639140 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.639156 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.639165 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:33Z","lastTransitionTime":"2025-12-02T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.690209 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l5jg6_3051381c-49c8-4217-9831-013ca2931604/ovnkube-controller/2.log" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.692733 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerStarted","Data":"35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff"} Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.693119 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.702173 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.717385 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd3e69c1fbdd3aad231b68925ca080aa7ab6fb7f6d17060007dba00467942ebf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:07:03Z\\\",\\\"message\\\":\\\"nshift-operator-lifecycle-manager/packageserver-service_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.153:5443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {5e50827b-d271-442b-b8a7-7f33b2cd6b11}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 00:07:03.167930 6498 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI1202 00:07:03.167936 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI1202 00:07:03.167940 6498 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1202 00:07:03.167263 6498 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1202 00:07:03.167948 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1202 00:07:03.167951 6498 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI1202 00:07:03.167937 6498 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer 
Row:map[external_ids:{GoMap:map[k8s.ovn.org/kin\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:07:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\
\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.727615 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.737390 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59312e00-584c-4b1d-9148-e0ec4c2dfab0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4969137cf60fccb75086b7aa6f66dca51d6db3e3b49deba1b5ecd3f35d967daf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://392490e6681da7d19655ef58c640b234085c1ff43d80cea0c78613cfa0c6a7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e416b694bbfa89cc45862a18980da1884ef31b8519981402d99caa02eb99a239\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.741068 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.741117 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.741130 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.741148 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 
00:07:33.741160 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:33Z","lastTransitionTime":"2025-12-02T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.749956 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.760021 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.769807 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.780217 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.790393 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b19beb5-329d-48ef-bce0-8e299b9a21c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://693c2f513a2b7d12bfa2c567c40bdc72fc5059614c9edf1b3b9784cf368612db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e7c9080ee52716f66a9635ece39df2447e9c43d749e8e284a43fb940bbdd8b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126
.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jzwq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.805966 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4zvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cbedea3d-dea3-407d-aae3-2ac725bcab34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4zvgr\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.830419 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8e
e7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"et
cd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.844285 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.844357 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.844375 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.844400 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.844419 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:33Z","lastTransitionTime":"2025-12-02T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.854965 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.869732 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.883054 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.893522 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7082ffe076a02439194564edcc80e68738d58df0d78fd91902ca555947068503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:07:19Z\\\",\\\"message\\\":\\\"2025-12-02T00:06:34+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_46a99ace-6567-41a2-adf5-f22de7fc50f0\\\\n2025-12-02T00:06:34+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_46a99ace-6567-41a2-adf5-f22de7fc50f0 to /host/opt/cni/bin/\\\\n2025-12-02T00:06:34Z [verbose] multus-daemon started\\\\n2025-12-02T00:06:34Z [verbose] Readiness Indicator file check\\\\n2025-12-02T00:07:19Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:07:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.901917 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d42838da-fa62-4095-9c01-86471a6e4e06\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c76478795a82cd5f074d675122e144fd3f16335bc7da88c41620e586a2a7576b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc35a904c17ddb0753b827f46716d62c481e6882d6824fe898ad9a6a4449eabc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc35a904c17ddb0753b827f46716d62c481e6882d6824fe898ad9a6a4449eabc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.920713 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.936706 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.947156 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.947212 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.947233 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.947257 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.947275 4856 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:33Z","lastTransitionTime":"2025-12-02T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:33 crc kubenswrapper[4856]: I1202 00:07:33.953304 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:33Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.049915 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.049997 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.050020 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.050052 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.050075 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:34Z","lastTransitionTime":"2025-12-02T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.153295 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.153340 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.153374 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.153393 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.153404 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:34Z","lastTransitionTime":"2025-12-02T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.274819 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.274850 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.274862 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:34 crc kubenswrapper[4856]: E1202 00:07:34.275002 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:34 crc kubenswrapper[4856]: E1202 00:07:34.275196 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:34 crc kubenswrapper[4856]: E1202 00:07:34.275340 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.276456 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.276513 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.276531 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.276558 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.276576 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:34Z","lastTransitionTime":"2025-12-02T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.378914 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.378990 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.379015 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.379046 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.379070 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:34Z","lastTransitionTime":"2025-12-02T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.481469 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.481534 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.481556 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.481582 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.481622 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:34Z","lastTransitionTime":"2025-12-02T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.584532 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.584575 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.584607 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.584624 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.584640 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:34Z","lastTransitionTime":"2025-12-02T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.687115 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.687169 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.687186 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.687210 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.687228 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:34Z","lastTransitionTime":"2025-12-02T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.699177 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l5jg6_3051381c-49c8-4217-9831-013ca2931604/ovnkube-controller/3.log" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.700102 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l5jg6_3051381c-49c8-4217-9831-013ca2931604/ovnkube-controller/2.log" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.703818 4856 generic.go:334] "Generic (PLEG): container finished" podID="3051381c-49c8-4217-9831-013ca2931604" containerID="35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff" exitCode=1 Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.703856 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerDied","Data":"35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff"} Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.703889 4856 scope.go:117] "RemoveContainer" containerID="bd3e69c1fbdd3aad231b68925ca080aa7ab6fb7f6d17060007dba00467942ebf" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.705138 4856 scope.go:117] "RemoveContainer" containerID="35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff" Dec 02 00:07:34 crc kubenswrapper[4856]: E1202 00:07:34.705430 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-l5jg6_openshift-ovn-kubernetes(3051381c-49c8-4217-9831-013ca2931604)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" podUID="3051381c-49c8-4217-9831-013ca2931604" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.733653 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.750616 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.771106 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.789332 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59312e00-584c-4b1d-9148-e0ec4c2dfab0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4969137cf60fccb75086b7aa6f66dca51d6db3e3b49deba1b5ecd3f35d967daf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://392490e6681da7d19655ef58c640b234085c1ff43d80cea0c78613cfa0c6a7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e416b694bbfa89cc45862a18980da1884ef31b8519981402d99caa02eb99a239\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.791030 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.791091 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.791112 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.791137 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 
00:07:34.791152 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:34Z","lastTransitionTime":"2025-12-02T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.802896 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.832836 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd3e69c1fbdd3aad231b68925ca080aa7ab6fb7f6d17060007dba00467942ebf\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:07:03Z\\\",\\\"message\\\":\\\"nshift-operator-lifecycle-manager/packageserver-service_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.153:5443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {5e50827b-d271-442b-b8a7-7f33b2cd6b11}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 00:07:03.167930 6498 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI1202 00:07:03.167936 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI1202 00:07:03.167940 6498 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1202 00:07:03.167263 6498 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1202 00:07:03.167948 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1202 00:07:03.167951 6498 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI1202 00:07:03.167937 6498 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kin\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:07:02Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:07:34Z\\\",\\\"message\\\":\\\"ice for network=default are: map[]\\\\nI1202 00:07:33.849460 6885 services_controller.go:443] Built service 
openshift-operator-lifecycle-manager/packageserver-service LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.153\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:5443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1202 00:07:33.849485 6885 services_controller.go:444] Built service openshift-operator-lifecycle-manager/packageserver-service LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1202 00:07:33.849499 6885 services_controller.go:445] Built service openshift-operator-lifecycle-manager/packageserver-service LB template configs for network=default: []services.lbConfig(nil)\\\\nF1202 00:07:33.849539 6885 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controlle\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:17
4f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.848943 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b19beb5-329d-48ef-bce0-8e299b9a21c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://693c2f513a2b7d12bfa2c567c40bdc72fc5059614c9edf1b3b9784cf368612db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e7c9080ee52716f66a9635ece39df2447e9c43d749e8e284a43fb940bbdd8b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jzwq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:34Z is after 2025-08-24T17:21:41Z" Dec 02 
00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.861666 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4zvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cbedea3d-dea3-407d-aae3-2ac725bcab34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4zvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.893688 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3
fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.894895 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.894937 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.894956 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.894982 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.895001 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:34Z","lastTransitionTime":"2025-12-02T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.913554 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.931661 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.948854 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.965402 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.979866 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.993162 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d42838da-fa62-4095-9c01-86471a6e4e06\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c76478795a82cd5f074d675122e144fd3f16335bc7da88c41620e586a2a7576b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc35a904c17ddb0753b827f46716d62c481e6882d6824fe898ad9a6a4449eabc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc35a904c17ddb0753b827f46716d62c481e6882d6824fe898ad9a6a4449eabc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:34Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.997455 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.997519 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.997539 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.997567 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:34 crc kubenswrapper[4856]: I1202 00:07:34.997585 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:34Z","lastTransitionTime":"2025-12-02T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.013960 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\
\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.031503 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.055013 4856 
status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c
857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-
release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.076429 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7082ffe076a02439194564edcc80e68738d58df0d78fd91902ca555947068503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:07:19Z\\\",\\\"message\\\":\\\"2025-12-02T00:06:34+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_46a99ace-6567-41a2-adf5-f22de7fc50f0\\\\n2025-12-02T00:06:34+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_46a99ace-6567-41a2-adf5-f22de7fc50f0 to /host/opt/cni/bin/\\\\n2025-12-02T00:06:34Z [verbose] multus-daemon started\\\\n2025-12-02T00:06:34Z [verbose] Readiness Indicator file check\\\\n2025-12-02T00:07:19Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:07:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.100624 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.100681 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.100695 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.100714 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.100728 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:35Z","lastTransitionTime":"2025-12-02T00:07:35Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.203897 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.203948 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.203965 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.203990 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.204007 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:35Z","lastTransitionTime":"2025-12-02T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.251541 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:35 crc kubenswrapper[4856]: E1202 00:07:35.251779 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.306461 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.306613 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.306704 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.306794 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.306874 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:35Z","lastTransitionTime":"2025-12-02T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.410826 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.410891 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.410909 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.410934 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.410953 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:35Z","lastTransitionTime":"2025-12-02T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.513198 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.513271 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.513291 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.513323 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.513345 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:35Z","lastTransitionTime":"2025-12-02T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.616236 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.616306 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.616331 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.616363 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.616385 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:35Z","lastTransitionTime":"2025-12-02T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.710848 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l5jg6_3051381c-49c8-4217-9831-013ca2931604/ovnkube-controller/3.log" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.716230 4856 scope.go:117] "RemoveContainer" containerID="35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff" Dec 02 00:07:35 crc kubenswrapper[4856]: E1202 00:07:35.716495 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-l5jg6_openshift-ovn-kubernetes(3051381c-49c8-4217-9831-013ca2931604)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" podUID="3051381c-49c8-4217-9831-013ca2931604" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.718876 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.718925 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.718944 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.718969 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.718988 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:35Z","lastTransitionTime":"2025-12-02T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.735122 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.753340 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.783871 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf
4697dc98a09ac59a48201fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:07:34Z\\\",\\\"message\\\":\\\"ice for network=default are: map[]\\\\nI1202 00:07:33.849460 6885 services_controller.go:443] Built service openshift-operator-lifecycle-manager/packageserver-service LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.153\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:5443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1202 00:07:33.849485 6885 services_controller.go:444] Built service openshift-operator-lifecycle-manager/packageserver-service LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1202 00:07:33.849499 6885 services_controller.go:445] Built service openshift-operator-lifecycle-manager/packageserver-service LB template configs for network=default: []services.lbConfig(nil)\\\\nF1202 00:07:33.849539 6885 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controlle\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:07:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l5jg6_openshift-ovn-kubernetes(3051381c-49c8-4217-9831-013ca2931604)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.806007 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.821647 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59312e00-584c-4b1d-9148-e0ec4c2dfab0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4969137cf60fccb75086b7aa6f66dca51d6db3e3b49deba1b5ecd3f35d967daf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://392490e6681da7d19655ef58c640b234085c1ff43d80cea0c78613cfa0c6a7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e416b694bbfa89cc45862a18980da1884ef31b8519981402d99caa02eb99a239\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.822054 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.822109 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.822134 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.822164 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.822182 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:35Z","lastTransitionTime":"2025-12-02T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.835694 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.854648 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.870277 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.883126 4856 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.898656 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b19beb5-329d-48ef-bce0-8e299b9a21c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://693c2f513a2b7d12bfa2c567c40bdc72fc5059614c9edf1b3b9784cf368612db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e7c9080ee52716f66a9635ece39df2447e9c43d749e8e284a43fb940bbdd8b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jzwq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:35Z is after 2025-08-24T17:21:41Z" Dec 02 
00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.915166 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4zvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cbedea3d-dea3-407d-aae3-2ac725bcab34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4zvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.924938 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.925007 4856 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.925031 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.925061 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.925086 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:35Z","lastTransitionTime":"2025-12-02T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.947627 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernet
es/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"r
eason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.966450 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:35 crc kubenswrapper[4856]: I1202 00:07:35.983781 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:35Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.005080 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,
\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disable
d\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name
\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.025511 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7082ffe076a02439194564edcc80e68738d58df0d78fd91902ca555947068503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:07:19Z\\\",\\\"message\\\":\\\"2025-12-02T00:06:34+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_46a99ace-6567-41a2-adf5-f22de7fc50f0\\\\n2025-12-02T00:06:34+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_46a99ace-6567-41a2-adf5-f22de7fc50f0 to /host/opt/cni/bin/\\\\n2025-12-02T00:06:34Z [verbose] multus-daemon started\\\\n2025-12-02T00:06:34Z [verbose] Readiness Indicator file check\\\\n2025-12-02T00:07:19Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:07:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.028475 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.028522 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.028538 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.028563 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.028584 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:36Z","lastTransitionTime":"2025-12-02T00:07:36Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.040337 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d42838da-fa62-4095-9c01-86471a6e4e06\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c76478795a82cd5f074d675122e144fd3f16335bc7da88c41620e586a2a7576b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc35a904c17ddb0753b827f46716d62c481e6882d6824fe898ad9a6a4449eabc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc35a904c17ddb0753b827f46716d62c481e6882d6824fe898ad9a6a4449eabc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-02T00:07:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.059005 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.076217 4856 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.094005 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:07:36 crc kubenswrapper[4856]: E1202 00:07:36.094264 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:40.094226636 +0000 UTC m=+147.120594670 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.094705 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:36 crc kubenswrapper[4856]: E1202 00:07:36.094869 4856 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 00:07:36 crc kubenswrapper[4856]: E1202 00:07:36.095185 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 00:08:40.0951574 +0000 UTC m=+147.121525434 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.131913 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.131973 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.131991 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.132015 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.132033 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:36Z","lastTransitionTime":"2025-12-02T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.195573 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.195740 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.195790 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:36 crc kubenswrapper[4856]: E1202 00:07:36.195949 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 00:07:36 crc kubenswrapper[4856]: E1202 00:07:36.195998 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 00:07:36 crc kubenswrapper[4856]: E1202 00:07:36.196024 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 00:07:36 crc kubenswrapper[4856]: E1202 00:07:36.196045 4856 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 00:07:36 crc kubenswrapper[4856]: E1202 00:07:36.196047 4856 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:07:36 crc kubenswrapper[4856]: E1202 00:07:36.195986 4856 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 00:07:36 crc kubenswrapper[4856]: E1202 00:07:36.196070 4856 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:07:36 crc kubenswrapper[4856]: E1202 00:07:36.196220 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl 
podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 00:08:40.196126307 +0000 UTC m=+147.222494341 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:07:36 crc kubenswrapper[4856]: E1202 00:07:36.196254 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 00:08:40.19624079 +0000 UTC m=+147.222608824 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 00:07:36 crc kubenswrapper[4856]: E1202 00:07:36.196275 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 00:08:40.19626391 +0000 UTC m=+147.222631944 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.235696 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.235763 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.235784 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.235815 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.235836 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:36Z","lastTransitionTime":"2025-12-02T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.252082 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.252134 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.252100 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:36 crc kubenswrapper[4856]: E1202 00:07:36.252243 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:36 crc kubenswrapper[4856]: E1202 00:07:36.252390 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:36 crc kubenswrapper[4856]: E1202 00:07:36.252651 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.339120 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.339181 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.339198 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.339225 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.339241 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:36Z","lastTransitionTime":"2025-12-02T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.351237 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.351274 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.351286 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.351303 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.351315 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:36Z","lastTransitionTime":"2025-12-02T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:36 crc kubenswrapper[4856]: E1202 00:07:36.365793 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.370339 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.370405 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.370439 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.370459 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.370470 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:36Z","lastTransitionTime":"2025-12-02T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:36 crc kubenswrapper[4856]: E1202 00:07:36.385133 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.389306 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.389363 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.389381 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.389404 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.389422 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:36Z","lastTransitionTime":"2025-12-02T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:36 crc kubenswrapper[4856]: E1202 00:07:36.409660 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.413556 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.413625 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.413640 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.413658 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.413672 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:36Z","lastTransitionTime":"2025-12-02T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:36 crc kubenswrapper[4856]: E1202 00:07:36.436762 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.441024 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.441093 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.441115 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.441145 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.441165 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:36Z","lastTransitionTime":"2025-12-02T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:36 crc kubenswrapper[4856]: E1202 00:07:36.460910 4856 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5c7b2cda-4eb3-4a4c-9166-bf25d0cae556\\\",\\\"systemUUID\\\":\\\"3d1824ac-8d4d-4481-a69e-2d81f0b86b53\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:36Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:36 crc kubenswrapper[4856]: E1202 00:07:36.461137 4856 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.462747 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.462813 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.462840 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.462870 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.462892 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:36Z","lastTransitionTime":"2025-12-02T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.566062 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.566119 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.566135 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.566158 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.566178 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:36Z","lastTransitionTime":"2025-12-02T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.668627 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.668674 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.668688 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.668709 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.668722 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:36Z","lastTransitionTime":"2025-12-02T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.771780 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.771843 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.771866 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.771926 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.771953 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:36Z","lastTransitionTime":"2025-12-02T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.874958 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.875022 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.875041 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.875064 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.875080 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:36Z","lastTransitionTime":"2025-12-02T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.977881 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.977949 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.977966 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.977991 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:36 crc kubenswrapper[4856]: I1202 00:07:36.978009 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:36Z","lastTransitionTime":"2025-12-02T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.080780 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.080902 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.080921 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.080944 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.080961 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:37Z","lastTransitionTime":"2025-12-02T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.184256 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.184303 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.184315 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.184330 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.184340 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:37Z","lastTransitionTime":"2025-12-02T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.252317 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:37 crc kubenswrapper[4856]: E1202 00:07:37.252536 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.286819 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.286896 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.286920 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.286952 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.286970 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:37Z","lastTransitionTime":"2025-12-02T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.390111 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.390173 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.390192 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.390219 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.390242 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:37Z","lastTransitionTime":"2025-12-02T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.493136 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.493200 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.493218 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.493245 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.493264 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:37Z","lastTransitionTime":"2025-12-02T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.596727 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.596806 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.596832 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.596863 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.596884 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:37Z","lastTransitionTime":"2025-12-02T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.699927 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.699971 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.699982 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.699999 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.700010 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:37Z","lastTransitionTime":"2025-12-02T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.802443 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.802479 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.802492 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.802512 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.802523 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:37Z","lastTransitionTime":"2025-12-02T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.905632 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.905681 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.905693 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.905713 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:37 crc kubenswrapper[4856]: I1202 00:07:37.905727 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:37Z","lastTransitionTime":"2025-12-02T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.008973 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.009039 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.009061 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.009090 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.009141 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:38Z","lastTransitionTime":"2025-12-02T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.111857 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.111919 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.111938 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.111966 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.111985 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:38Z","lastTransitionTime":"2025-12-02T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.214701 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.214741 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.214754 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.214770 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.214779 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:38Z","lastTransitionTime":"2025-12-02T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.251836 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.252020 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:38 crc kubenswrapper[4856]: E1202 00:07:38.252237 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.252256 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:38 crc kubenswrapper[4856]: E1202 00:07:38.252380 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:38 crc kubenswrapper[4856]: E1202 00:07:38.252537 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.317288 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.317340 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.317356 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.317382 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.317398 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:38Z","lastTransitionTime":"2025-12-02T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.419582 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.419688 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.419716 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.419746 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.419767 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:38Z","lastTransitionTime":"2025-12-02T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.522035 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.522098 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.522109 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.522123 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.522133 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:38Z","lastTransitionTime":"2025-12-02T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.624095 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.624164 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.624176 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.624192 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.624203 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:38Z","lastTransitionTime":"2025-12-02T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.727108 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.727161 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.727179 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.727201 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.727219 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:38Z","lastTransitionTime":"2025-12-02T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.830863 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.830933 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.830952 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.830976 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.830992 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:38Z","lastTransitionTime":"2025-12-02T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.934576 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.934644 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.934657 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.934696 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:38 crc kubenswrapper[4856]: I1202 00:07:38.934710 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:38Z","lastTransitionTime":"2025-12-02T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.037862 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.037918 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.037936 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.037959 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.037977 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:39Z","lastTransitionTime":"2025-12-02T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.141221 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.141290 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.141308 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.141336 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.141353 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:39Z","lastTransitionTime":"2025-12-02T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.245010 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.245070 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.245095 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.245128 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.245150 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:39Z","lastTransitionTime":"2025-12-02T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.251231 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:39 crc kubenswrapper[4856]: E1202 00:07:39.251419 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.348030 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.348078 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.348092 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.348111 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.348123 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:39Z","lastTransitionTime":"2025-12-02T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.451013 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.451141 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.451157 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.451175 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.451188 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:39Z","lastTransitionTime":"2025-12-02T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.554345 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.554398 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.554409 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.554426 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.554437 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:39Z","lastTransitionTime":"2025-12-02T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.658082 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.658127 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.658145 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.658167 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.658183 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:39Z","lastTransitionTime":"2025-12-02T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.760765 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.760848 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.760871 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.760903 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.760927 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:39Z","lastTransitionTime":"2025-12-02T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.863250 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.863292 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.863305 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.863323 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.863335 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:39Z","lastTransitionTime":"2025-12-02T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.965858 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.965919 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.965932 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.965948 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:39 crc kubenswrapper[4856]: I1202 00:07:39.965957 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:39Z","lastTransitionTime":"2025-12-02T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.068241 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.068268 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.068275 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.068288 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.068297 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:40Z","lastTransitionTime":"2025-12-02T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.171490 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.171567 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.171635 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.171673 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.171695 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:40Z","lastTransitionTime":"2025-12-02T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.252289 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.252364 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.252319 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:40 crc kubenswrapper[4856]: E1202 00:07:40.252465 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:40 crc kubenswrapper[4856]: E1202 00:07:40.252644 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:40 crc kubenswrapper[4856]: E1202 00:07:40.252701 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.274187 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.274234 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.274246 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.274263 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.274275 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:40Z","lastTransitionTime":"2025-12-02T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.377826 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.377882 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.377897 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.377917 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.377928 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:40Z","lastTransitionTime":"2025-12-02T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.481324 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.481378 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.481397 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.481420 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.481438 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:40Z","lastTransitionTime":"2025-12-02T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.584438 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.584501 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.584521 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.584551 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.584575 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:40Z","lastTransitionTime":"2025-12-02T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.687305 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.687363 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.687375 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.687395 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.687407 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:40Z","lastTransitionTime":"2025-12-02T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.790383 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.790454 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.790472 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.790497 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.790515 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:40Z","lastTransitionTime":"2025-12-02T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.894916 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.894976 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.894999 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.895027 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.895060 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:40Z","lastTransitionTime":"2025-12-02T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.998430 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.998661 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.998702 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.998727 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:40 crc kubenswrapper[4856]: I1202 00:07:40.998745 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:40Z","lastTransitionTime":"2025-12-02T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.102119 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.102181 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.102199 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.102226 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.102244 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:41Z","lastTransitionTime":"2025-12-02T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.204887 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.204954 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.204972 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.204997 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.205018 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:41Z","lastTransitionTime":"2025-12-02T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.251935 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:41 crc kubenswrapper[4856]: E1202 00:07:41.252147 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.307140 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.307217 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.307241 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.307271 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.307297 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:41Z","lastTransitionTime":"2025-12-02T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.410647 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.410732 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.410755 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.410785 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.410805 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:41Z","lastTransitionTime":"2025-12-02T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.513751 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.514179 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.514208 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.514236 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.514254 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:41Z","lastTransitionTime":"2025-12-02T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.617114 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.617155 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.617164 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.617180 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.617190 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:41Z","lastTransitionTime":"2025-12-02T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.720211 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.720256 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.720267 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.720283 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.720293 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:41Z","lastTransitionTime":"2025-12-02T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.823713 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.823771 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.823789 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.823814 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.823832 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:41Z","lastTransitionTime":"2025-12-02T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.926703 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.926809 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.926827 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.926851 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:41 crc kubenswrapper[4856]: I1202 00:07:41.926871 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:41Z","lastTransitionTime":"2025-12-02T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.030292 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.030362 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.030381 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.030406 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.030424 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:42Z","lastTransitionTime":"2025-12-02T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.133979 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.134053 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.134076 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.134108 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.134129 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:42Z","lastTransitionTime":"2025-12-02T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.239121 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.239193 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.239218 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.239249 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.239270 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:42Z","lastTransitionTime":"2025-12-02T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.251442 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.251512 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.251466 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:42 crc kubenswrapper[4856]: E1202 00:07:42.251720 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:42 crc kubenswrapper[4856]: E1202 00:07:42.251930 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:42 crc kubenswrapper[4856]: E1202 00:07:42.252112 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.342437 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.342547 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.342572 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.342631 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.342656 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:42Z","lastTransitionTime":"2025-12-02T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.446718 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.446825 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.446850 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.446893 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.446917 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:42Z","lastTransitionTime":"2025-12-02T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.550432 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.550498 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.550518 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.550587 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.550899 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:42Z","lastTransitionTime":"2025-12-02T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.654605 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.654667 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.654683 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.654708 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.654725 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:42Z","lastTransitionTime":"2025-12-02T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.757442 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.757531 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.757552 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.757626 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.757653 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:42Z","lastTransitionTime":"2025-12-02T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.861879 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.862307 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.862500 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.862713 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.862879 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:42Z","lastTransitionTime":"2025-12-02T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.965973 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.966042 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.966059 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.966085 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:42 crc kubenswrapper[4856]: I1202 00:07:42.966107 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:42Z","lastTransitionTime":"2025-12-02T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.069559 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.069628 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.069639 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.069656 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.069667 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:43Z","lastTransitionTime":"2025-12-02T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.172986 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.173766 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.173792 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.173810 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.173826 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:43Z","lastTransitionTime":"2025-12-02T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.252755 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:43 crc kubenswrapper[4856]: E1202 00:07:43.253101 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.275418 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ade0bc15ca50d7577a6df47349572f4570524ba3cf176bd51f8df800586442d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.278996 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.279104 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.279134 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.279166 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.279188 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:43Z","lastTransitionTime":"2025-12-02T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.295750 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.316541 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.339171 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0271f00d-b420-4dee-aa8b-92d6fc294b2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://540d6d72c17323e31798c9770eb1770b33874c1d4b0a39eb19c622565d90953a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4brt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-455ww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.359561 4856 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-p5j4l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee3e0398-8021-446b-b638-d498b5032575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a89b14b95b054b0185eefcd9b23d38782e31d1df8a09f3cb72536d31a4795730\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x56rh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-p5j4l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.378253 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b19beb5-329d-48ef-bce0-8e299b9a21c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://693c2f513a2b7d12bfa2c567c40bdc72fc5059614c9edf1b3b9784cf368612db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e7c9080ee52716f66a9635ece39df2447e9c43d749e8e284a43fb940bbdd8b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6467r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jzwq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:43Z is after 2025-08-24T17:21:41Z" Dec 02 
00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.381268 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.381293 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.381305 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.381320 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.381329 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:43Z","lastTransitionTime":"2025-12-02T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.394513 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4zvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cbedea3d-dea3-407d-aae3-2ac725bcab34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7xf44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4zvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.430113 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2c52f8c-bdd4-4b2d-b69f-9d62ff1c4665\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17bd488604507bfc761522c745543f27258984e7fc1227eafb61eac04cae3b4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0646d63e8141730adcee43befd45174e1c0262602ac3d9a407d3c75bdb2cd4b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aa8a2f3827edc9629a40d5aa75698d5e62f6a9606cec0ebb93fbeee7b2a6a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33a32067d3d429096016716a4b1341f0c8f17e3
fe71dc0cebfb275f6294a2516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ab956a7edf15ac63b58b99f167e019004d03ff6e44879cf76a11bd86556da3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa609a9434e33eed97192dbd753ca4caa778ff8e6947f5e1cc44819c1d9b618e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8929abbd212e98b8489a3c33785adbfba6b09c8d2b99d5db5065369385862d1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03c6dd88ef3ac806c56e80eef460d707851267b111d8031851b441dd378bd7bd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.454490 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://476477be96a7ccaadb4165de632424046a9a83b63bcd3de6f7c2b483cac83526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://503cad102a2e30d1c95d531d0a38e40e8ee81c479369d497080b59406f45d3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.473901 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00a0c2197bc80d3debe4a862274e4cf13818ee9002eafc17695c68dde62af128\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:43Z is after 2025-08-24T17:21:41Z" Dec 
02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.486383 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.486478 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.486508 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.486543 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.486578 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:43Z","lastTransitionTime":"2025-12-02T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.506484 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qlft7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fa9541c-8427-40d2-aa5d-b53cb430bddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f399916295a928a42de4c42e499cf623ced181c67abfcde0af239317a00fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\
\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6bf91f136133aaf499649367e0e9674593ed944ee3508c406c5afd7b2a146d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://786a8c2f4fa1c2dcf5200cd5da4df9fd316b705ad54f5419e219a2c82bfdb482\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edb43eb8cc88649e32a9bf5004ebdc1e5d0eab6a8fd43d340c34e8e82d9fff3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"re
adOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://294770b521f421410a5e2d23bf2f0012a0538b5105bf95c0d8272bb475e4e70f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b3356baec636c1acac69e93afd484ff87d7e5c68d97e7295290f71c1cfe520b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca151e94d2695906148882cd5c6b5d32f5045df189e0aea95a9735ef104bf932\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:38Z\\\
",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6p6v6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qlft7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.529961 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5mfwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"536def47-c9d3-4c3e-9b4a-3776e034998b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:07:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7082ffe076a02439194564edcc80e68738d58df0d78fd91902ca555947068503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:07:19Z\\\",\\\"message\\\":\\\"2025-12-02T00:06:34+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_46a99ace-6567-41a2-adf5-f22de7fc50f0\\\\n2025-12-02T00:06:34+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_46a99ace-6567-41a2-adf5-f22de7fc50f0 to /host/opt/cni/bin/\\\\n2025-12-02T00:06:34Z [verbose] multus-daemon started\\\\n2025-12-02T00:06:34Z [verbose] Readiness Indicator file check\\\\n2025-12-02T00:07:19Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:07:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zj72w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5mfwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.547018 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d42838da-fa62-4095-9c01-86471a6e4e06\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c76478795a82cd5f074d675122e144fd3f16335bc7da88c41620e586a2a7576b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc35a904c17ddb0753b827f46716d62c481e6882d6824fe898ad9a6a4449eabc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc35a904c17ddb0753b827f46716d62c481e6882d6824fe898ad9a6a4449eabc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.567371 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.588471 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"acd644f8-9ce7-42e4-af58-11bb128c9974\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T00:06:31Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 00:06:25.617682 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 00:06:25.618765 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1422430807/tls.crt::/tmp/serving-cert-1422430807/tls.key\\\\\\\"\\\\nI1202 00:06:31.418389 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 00:06:31.420520 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 00:06:31.420602 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 00:06:31.420660 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 00:06:31.420687 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 00:06:31.426129 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 00:06:31.426228 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 00:06:31.426258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI1202 00:06:31.426162 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 00:06:31.426285 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 00:06:31.426346 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 00:06:31.426370 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 00:06:31.426394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 00:06:31.430848 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:15Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.589124 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.589178 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.589197 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.589221 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.589238 4856 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:43Z","lastTransitionTime":"2025-12-02T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.606681 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59312e00-584c-4b1d-9148-e0ec4c2dfab0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4969137cf60fccb75086b7aa6f66dca51d6db3e3b49deba1b5ecd3f35d967daf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://392490e6681da7d19655ef58c640b234085c1ff43d80cea0c78613cfa0c6a7a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e416b694bbfa89cc45862a18980da1884ef31b8519981402d99caa02eb99a239\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controlle
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d13b8a5950495204a713cd3766aa2706719cf59584e52fd567948d70308872c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.625036 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rl6j8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5969950-38f4-420c-8824-e9164238cacf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c4ca38b21b63ee96dd0d4965a22f58958e6e04566e040f314d80076170f7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzv4w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rl6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.662170 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3051381c-49c8-4217-9831-013ca2931604\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T00:07:34Z\\\",\\\"message\\\":\\\"ice for network=default are: map[]\\\\nI1202 00:07:33.849460 6885 services_controller.go:443] Built service openshift-operator-lifecycle-manager/packageserver-service LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.153\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:5443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1202 00:07:33.849485 6885 services_controller.go:444] Built service openshift-operator-lifecycle-manager/packageserver-service LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1202 00:07:33.849499 6885 services_controller.go:445] Built service openshift-operator-lifecycle-manager/packageserver-service LB template configs for network=default: []services.lbConfig(nil)\\\\nF1202 00:07:33.849539 6885 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controlle\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T00:07:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l5jg6_openshift-ovn-kubernetes(3051381c-49c8-4217-9831-013ca2931604)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T00:06:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2g5ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l5jg6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.686187 4856 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2743fe0-4725-42f9-94a0-cb090b570904\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T00:06:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69900307fa18bb8c2655f389fffedc9bb9b45619577f6f83bd68620cfe0a2101\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ba1d4bd9879b2a9a55962e89d6492d70ae4b2aa464b10aa0127e1bbb5f31201\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5aeff91a0dc5dd1d3d3b041d871346050937a399fa61fd04ccd1673663f0e9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T00:06:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T00:06:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T00:07:43Z is after 2025-08-24T17:21:41Z" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.692281 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.692397 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.692419 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.692481 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.692500 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:43Z","lastTransitionTime":"2025-12-02T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.795970 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.796044 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.796059 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.796108 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.796122 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:43Z","lastTransitionTime":"2025-12-02T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.899970 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.900040 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.900062 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.900093 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:43 crc kubenswrapper[4856]: I1202 00:07:43.900114 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:43Z","lastTransitionTime":"2025-12-02T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.003409 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.003477 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.003499 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.003527 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.003547 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:44Z","lastTransitionTime":"2025-12-02T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.107373 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.107530 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.107622 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.107661 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.107683 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:44Z","lastTransitionTime":"2025-12-02T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.211297 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.211389 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.211413 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.211445 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.211474 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:44Z","lastTransitionTime":"2025-12-02T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.295850 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:44 crc kubenswrapper[4856]: E1202 00:07:44.296050 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.296101 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:44 crc kubenswrapper[4856]: E1202 00:07:44.296260 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.296122 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:44 crc kubenswrapper[4856]: E1202 00:07:44.296376 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.314097 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.314138 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.314150 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.314205 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.314219 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:44Z","lastTransitionTime":"2025-12-02T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.416521 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.416576 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.416620 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.416645 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.416664 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:44Z","lastTransitionTime":"2025-12-02T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.519790 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.519835 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.519846 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.519864 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.519875 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:44Z","lastTransitionTime":"2025-12-02T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.622192 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.622245 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.622255 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.622269 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.622280 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:44Z","lastTransitionTime":"2025-12-02T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.725356 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.726017 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.726203 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.726447 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.726720 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:44Z","lastTransitionTime":"2025-12-02T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.829046 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.829092 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.829104 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.829122 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.829135 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:44Z","lastTransitionTime":"2025-12-02T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.931230 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.931275 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.931288 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.931307 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:44 crc kubenswrapper[4856]: I1202 00:07:44.931320 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:44Z","lastTransitionTime":"2025-12-02T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.034652 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.034728 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.034753 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.034786 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.034812 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:45Z","lastTransitionTime":"2025-12-02T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.137910 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.137963 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.137976 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.137997 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.138014 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:45Z","lastTransitionTime":"2025-12-02T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.244237 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.244299 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.244317 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.244353 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.244368 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:45Z","lastTransitionTime":"2025-12-02T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.251408 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:45 crc kubenswrapper[4856]: E1202 00:07:45.251728 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.346907 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.346952 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.346973 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.347000 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.347015 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:45Z","lastTransitionTime":"2025-12-02T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.449509 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.449814 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.449905 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.450215 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.450390 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:45Z","lastTransitionTime":"2025-12-02T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.553062 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.553095 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.553103 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.553117 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.553126 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:45Z","lastTransitionTime":"2025-12-02T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.655642 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.655688 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.655704 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.655729 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.655748 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:45Z","lastTransitionTime":"2025-12-02T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.758216 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.758310 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.758338 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.758390 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.758410 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:45Z","lastTransitionTime":"2025-12-02T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.861263 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.861343 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.861361 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.861385 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.861403 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:45Z","lastTransitionTime":"2025-12-02T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.965329 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.965399 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.965420 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.965445 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:45 crc kubenswrapper[4856]: I1202 00:07:45.965462 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:45Z","lastTransitionTime":"2025-12-02T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.068913 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.068971 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.068988 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.069011 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.069028 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:46Z","lastTransitionTime":"2025-12-02T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.171109 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.171157 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.171173 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.171195 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.171214 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:46Z","lastTransitionTime":"2025-12-02T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.251396 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.251435 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.251453 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:46 crc kubenswrapper[4856]: E1202 00:07:46.251567 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:46 crc kubenswrapper[4856]: E1202 00:07:46.251726 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:46 crc kubenswrapper[4856]: E1202 00:07:46.251772 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.273786 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.273870 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.273893 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.273923 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.273944 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:46Z","lastTransitionTime":"2025-12-02T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.376126 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.376182 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.376194 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.376214 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.376227 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:46Z","lastTransitionTime":"2025-12-02T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.478979 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.479056 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.479073 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.479098 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.479120 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:46Z","lastTransitionTime":"2025-12-02T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.482917 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.483018 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.483047 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.483075 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.483096 4856 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T00:07:46Z","lastTransitionTime":"2025-12-02T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.542406 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-nhh9x"] Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.542975 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nhh9x" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.544874 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.547086 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.547522 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.547763 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.549263 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/b730bb62-5644-4ed3-aacd-9663b65f1663-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-nhh9x\" (UID: \"b730bb62-5644-4ed3-aacd-9663b65f1663\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nhh9x" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.549324 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/b730bb62-5644-4ed3-aacd-9663b65f1663-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-nhh9x\" (UID: \"b730bb62-5644-4ed3-aacd-9663b65f1663\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nhh9x" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.549415 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b730bb62-5644-4ed3-aacd-9663b65f1663-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-nhh9x\" (UID: \"b730bb62-5644-4ed3-aacd-9663b65f1663\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nhh9x" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.549498 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b730bb62-5644-4ed3-aacd-9663b65f1663-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-nhh9x\" (UID: \"b730bb62-5644-4ed3-aacd-9663b65f1663\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nhh9x" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.549553 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b730bb62-5644-4ed3-aacd-9663b65f1663-service-ca\") pod \"cluster-version-operator-5c965bbfc6-nhh9x\" (UID: \"b730bb62-5644-4ed3-aacd-9663b65f1663\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nhh9x" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.634999 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=74.634976675 podStartE2EDuration="1m14.634976675s" podCreationTimestamp="2025-12-02 00:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 
00:07:46.587702863 +0000 UTC m=+93.614070867" watchObservedRunningTime="2025-12-02 00:07:46.634976675 +0000 UTC m=+93.661344699" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.635247 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=47.635242782 podStartE2EDuration="47.635242782s" podCreationTimestamp="2025-12-02 00:06:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:07:46.635187131 +0000 UTC m=+93.661555145" watchObservedRunningTime="2025-12-02 00:07:46.635242782 +0000 UTC m=+93.661610796" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.650294 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b730bb62-5644-4ed3-aacd-9663b65f1663-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-nhh9x\" (UID: \"b730bb62-5644-4ed3-aacd-9663b65f1663\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nhh9x" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.650339 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b730bb62-5644-4ed3-aacd-9663b65f1663-service-ca\") pod \"cluster-version-operator-5c965bbfc6-nhh9x\" (UID: \"b730bb62-5644-4ed3-aacd-9663b65f1663\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nhh9x" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.650501 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/b730bb62-5644-4ed3-aacd-9663b65f1663-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-nhh9x\" (UID: \"b730bb62-5644-4ed3-aacd-9663b65f1663\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nhh9x" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.650528 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/b730bb62-5644-4ed3-aacd-9663b65f1663-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-nhh9x\" (UID: \"b730bb62-5644-4ed3-aacd-9663b65f1663\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nhh9x" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.650571 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b730bb62-5644-4ed3-aacd-9663b65f1663-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-nhh9x\" (UID: \"b730bb62-5644-4ed3-aacd-9663b65f1663\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nhh9x" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.650641 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/b730bb62-5644-4ed3-aacd-9663b65f1663-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-nhh9x\" (UID: \"b730bb62-5644-4ed3-aacd-9663b65f1663\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nhh9x" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.650647 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/b730bb62-5644-4ed3-aacd-9663b65f1663-etc-cvo-updatepayloads\") 
pod \"cluster-version-operator-5c965bbfc6-nhh9x\" (UID: \"b730bb62-5644-4ed3-aacd-9663b65f1663\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nhh9x" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.651732 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b730bb62-5644-4ed3-aacd-9663b65f1663-service-ca\") pod \"cluster-version-operator-5c965bbfc6-nhh9x\" (UID: \"b730bb62-5644-4ed3-aacd-9663b65f1663\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nhh9x" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.657437 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b730bb62-5644-4ed3-aacd-9663b65f1663-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-nhh9x\" (UID: \"b730bb62-5644-4ed3-aacd-9663b65f1663\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nhh9x" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.675660 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-rl6j8" podStartSLOduration=74.675639441 podStartE2EDuration="1m14.675639441s" podCreationTimestamp="2025-12-02 00:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:07:46.653220696 +0000 UTC m=+93.679588700" watchObservedRunningTime="2025-12-02 00:07:46.675639441 +0000 UTC m=+93.702007445" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.681326 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b730bb62-5644-4ed3-aacd-9663b65f1663-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-nhh9x\" (UID: \"b730bb62-5644-4ed3-aacd-9663b65f1663\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nhh9x" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.704971 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=74.704940751 podStartE2EDuration="1m14.704940751s" podCreationTimestamp="2025-12-02 00:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:07:46.691162813 +0000 UTC m=+93.717530827" watchObservedRunningTime="2025-12-02 00:07:46.704940751 +0000 UTC m=+93.731308765" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.746546 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podStartSLOduration=74.74652179 podStartE2EDuration="1m14.74652179s" podCreationTimestamp="2025-12-02 00:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:07:46.746038297 +0000 UTC m=+93.772406301" watchObservedRunningTime="2025-12-02 00:07:46.74652179 +0000 UTC m=+93.772889814" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.775017 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jzwq9" podStartSLOduration=73.774975437 podStartE2EDuration="1m13.774975437s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 
UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:07:46.774664749 +0000 UTC m=+93.801032743" watchObservedRunningTime="2025-12-02 00:07:46.774975437 +0000 UTC m=+93.801343461" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.776063 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-p5j4l" podStartSLOduration=74.776053565 podStartE2EDuration="1m14.776053565s" podCreationTimestamp="2025-12-02 00:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:07:46.759525568 +0000 UTC m=+93.785893592" watchObservedRunningTime="2025-12-02 00:07:46.776053565 +0000 UTC m=+93.802421589" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.818191 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=74.818161637 podStartE2EDuration="1m14.818161637s" podCreationTimestamp="2025-12-02 00:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:07:46.816926086 +0000 UTC m=+93.843294140" watchObservedRunningTime="2025-12-02 00:07:46.818161637 +0000 UTC m=+93.844529681" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.860508 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nhh9x" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.902925 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-qlft7" podStartSLOduration=74.902890754 podStartE2EDuration="1m14.902890754s" podCreationTimestamp="2025-12-02 00:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:07:46.897525099 +0000 UTC m=+93.923893143" watchObservedRunningTime="2025-12-02 00:07:46.902890754 +0000 UTC m=+93.929258788" Dec 02 00:07:46 crc kubenswrapper[4856]: I1202 00:07:46.944280 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-5mfwj" podStartSLOduration=74.944244028 podStartE2EDuration="1m14.944244028s" podCreationTimestamp="2025-12-02 00:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:07:46.918103008 +0000 UTC m=+93.944471052" watchObservedRunningTime="2025-12-02 00:07:46.944244028 +0000 UTC m=+93.970612062" Dec 02 00:07:47 crc kubenswrapper[4856]: I1202 00:07:47.251805 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:47 crc kubenswrapper[4856]: E1202 00:07:47.252605 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:47 crc kubenswrapper[4856]: I1202 00:07:47.763793 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nhh9x" event={"ID":"b730bb62-5644-4ed3-aacd-9663b65f1663","Type":"ContainerStarted","Data":"44a4581e1e19c3330955935486d05ea4f7d371a7450630acf0463eb983a41fc9"} Dec 02 00:07:47 crc kubenswrapper[4856]: I1202 00:07:47.763897 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nhh9x" event={"ID":"b730bb62-5644-4ed3-aacd-9663b65f1663","Type":"ContainerStarted","Data":"f02e3af51451f3f6e9dbaef29833f7be79036b5d7b7ff3e0a02b3078ccffd4bc"} Dec 02 00:07:47 crc kubenswrapper[4856]: I1202 00:07:47.792440 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=23.792401894 podStartE2EDuration="23.792401894s" podCreationTimestamp="2025-12-02 00:07:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:07:46.945646123 +0000 UTC m=+93.972014167" watchObservedRunningTime="2025-12-02 00:07:47.792401894 +0000 UTC m=+94.818769938" Dec 02 00:07:47 crc kubenswrapper[4856]: I1202 00:07:47.792727 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nhh9x" podStartSLOduration=75.792716782 podStartE2EDuration="1m15.792716782s" podCreationTimestamp="2025-12-02 00:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:07:47.791222234 +0000 UTC m=+94.817590238" watchObservedRunningTime="2025-12-02 00:07:47.792716782 +0000 UTC m=+94.819084816" Dec 02 00:07:48 crc kubenswrapper[4856]: I1202 00:07:48.251555 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:48 crc kubenswrapper[4856]: I1202 00:07:48.251569 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:48 crc kubenswrapper[4856]: E1202 00:07:48.251824 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:48 crc kubenswrapper[4856]: I1202 00:07:48.251579 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:48 crc kubenswrapper[4856]: E1202 00:07:48.252036 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:48 crc kubenswrapper[4856]: E1202 00:07:48.252167 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:49 crc kubenswrapper[4856]: I1202 00:07:49.251616 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:49 crc kubenswrapper[4856]: E1202 00:07:49.252379 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:49 crc kubenswrapper[4856]: I1202 00:07:49.253078 4856 scope.go:117] "RemoveContainer" containerID="35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff" Dec 02 00:07:49 crc kubenswrapper[4856]: E1202 00:07:49.253414 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-l5jg6_openshift-ovn-kubernetes(3051381c-49c8-4217-9831-013ca2931604)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" podUID="3051381c-49c8-4217-9831-013ca2931604" Dec 02 00:07:50 crc kubenswrapper[4856]: I1202 00:07:50.251520 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:50 crc kubenswrapper[4856]: I1202 00:07:50.251614 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:50 crc kubenswrapper[4856]: I1202 00:07:50.251630 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:50 crc kubenswrapper[4856]: E1202 00:07:50.251693 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:50 crc kubenswrapper[4856]: E1202 00:07:50.251817 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:50 crc kubenswrapper[4856]: E1202 00:07:50.251966 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:51 crc kubenswrapper[4856]: I1202 00:07:51.236555 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs\") pod \"network-metrics-daemon-4zvgr\" (UID: \"cbedea3d-dea3-407d-aae3-2ac725bcab34\") " pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:51 crc kubenswrapper[4856]: E1202 00:07:51.236979 4856 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 00:07:51 crc kubenswrapper[4856]: E1202 00:07:51.237076 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs podName:cbedea3d-dea3-407d-aae3-2ac725bcab34 nodeName:}" failed. No retries permitted until 2025-12-02 00:08:55.237053335 +0000 UTC m=+162.263421349 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs") pod "network-metrics-daemon-4zvgr" (UID: "cbedea3d-dea3-407d-aae3-2ac725bcab34") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 00:07:51 crc kubenswrapper[4856]: I1202 00:07:51.252295 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:51 crc kubenswrapper[4856]: E1202 00:07:51.252543 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:52 crc kubenswrapper[4856]: I1202 00:07:52.252167 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:52 crc kubenswrapper[4856]: I1202 00:07:52.252182 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:52 crc kubenswrapper[4856]: E1202 00:07:52.252422 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:52 crc kubenswrapper[4856]: E1202 00:07:52.252635 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:52 crc kubenswrapper[4856]: I1202 00:07:52.252724 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:52 crc kubenswrapper[4856]: E1202 00:07:52.252832 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:53 crc kubenswrapper[4856]: I1202 00:07:53.251767 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:53 crc kubenswrapper[4856]: E1202 00:07:53.254471 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:54 crc kubenswrapper[4856]: I1202 00:07:54.251852 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:54 crc kubenswrapper[4856]: I1202 00:07:54.251967 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:54 crc kubenswrapper[4856]: E1202 00:07:54.252158 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:54 crc kubenswrapper[4856]: I1202 00:07:54.252233 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:54 crc kubenswrapper[4856]: E1202 00:07:54.252315 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:54 crc kubenswrapper[4856]: E1202 00:07:54.252463 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:55 crc kubenswrapper[4856]: I1202 00:07:55.252495 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:55 crc kubenswrapper[4856]: E1202 00:07:55.252775 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:56 crc kubenswrapper[4856]: I1202 00:07:56.252224 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:56 crc kubenswrapper[4856]: I1202 00:07:56.252315 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:56 crc kubenswrapper[4856]: E1202 00:07:56.252339 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:56 crc kubenswrapper[4856]: I1202 00:07:56.252233 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:56 crc kubenswrapper[4856]: E1202 00:07:56.252542 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:56 crc kubenswrapper[4856]: E1202 00:07:56.252644 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:57 crc kubenswrapper[4856]: I1202 00:07:57.251271 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:57 crc kubenswrapper[4856]: E1202 00:07:57.251447 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:07:58 crc kubenswrapper[4856]: I1202 00:07:58.251350 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:07:58 crc kubenswrapper[4856]: E1202 00:07:58.251478 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:07:58 crc kubenswrapper[4856]: I1202 00:07:58.251789 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:07:58 crc kubenswrapper[4856]: I1202 00:07:58.251804 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:07:58 crc kubenswrapper[4856]: E1202 00:07:58.251890 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:07:58 crc kubenswrapper[4856]: E1202 00:07:58.252019 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:07:59 crc kubenswrapper[4856]: I1202 00:07:59.251881 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:07:59 crc kubenswrapper[4856]: E1202 00:07:59.252090 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:08:00 crc kubenswrapper[4856]: I1202 00:08:00.251742 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:08:00 crc kubenswrapper[4856]: E1202 00:08:00.251830 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:08:00 crc kubenswrapper[4856]: I1202 00:08:00.251880 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:08:00 crc kubenswrapper[4856]: I1202 00:08:00.251964 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:08:00 crc kubenswrapper[4856]: E1202 00:08:00.252023 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:08:00 crc kubenswrapper[4856]: E1202 00:08:00.252134 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:08:00 crc kubenswrapper[4856]: I1202 00:08:00.253119 4856 scope.go:117] "RemoveContainer" containerID="35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff" Dec 02 00:08:00 crc kubenswrapper[4856]: E1202 00:08:00.253228 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-l5jg6_openshift-ovn-kubernetes(3051381c-49c8-4217-9831-013ca2931604)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" podUID="3051381c-49c8-4217-9831-013ca2931604" Dec 02 00:08:01 crc kubenswrapper[4856]: I1202 00:08:01.251862 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:08:01 crc kubenswrapper[4856]: E1202 00:08:01.252013 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:08:02 crc kubenswrapper[4856]: I1202 00:08:02.251663 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:08:02 crc kubenswrapper[4856]: I1202 00:08:02.251708 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:08:02 crc kubenswrapper[4856]: I1202 00:08:02.251686 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:08:02 crc kubenswrapper[4856]: E1202 00:08:02.251833 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:08:02 crc kubenswrapper[4856]: E1202 00:08:02.252003 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:08:02 crc kubenswrapper[4856]: E1202 00:08:02.252119 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:08:03 crc kubenswrapper[4856]: I1202 00:08:03.252333 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:08:03 crc kubenswrapper[4856]: E1202 00:08:03.254687 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:08:04 crc kubenswrapper[4856]: I1202 00:08:04.251472 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:08:04 crc kubenswrapper[4856]: I1202 00:08:04.251581 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:08:04 crc kubenswrapper[4856]: I1202 00:08:04.251773 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:08:04 crc kubenswrapper[4856]: E1202 00:08:04.251773 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:08:04 crc kubenswrapper[4856]: E1202 00:08:04.251939 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:08:04 crc kubenswrapper[4856]: E1202 00:08:04.252071 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:08:05 crc kubenswrapper[4856]: I1202 00:08:05.251437 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:08:05 crc kubenswrapper[4856]: E1202 00:08:05.251578 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:08:05 crc kubenswrapper[4856]: I1202 00:08:05.825216 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5mfwj_536def47-c9d3-4c3e-9b4a-3776e034998b/kube-multus/1.log" Dec 02 00:08:05 crc kubenswrapper[4856]: I1202 00:08:05.825944 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5mfwj_536def47-c9d3-4c3e-9b4a-3776e034998b/kube-multus/0.log" Dec 02 00:08:05 crc kubenswrapper[4856]: I1202 00:08:05.826060 4856 generic.go:334] "Generic (PLEG): container finished" podID="536def47-c9d3-4c3e-9b4a-3776e034998b" containerID="7082ffe076a02439194564edcc80e68738d58df0d78fd91902ca555947068503" exitCode=1 Dec 02 00:08:05 crc kubenswrapper[4856]: I1202 00:08:05.826158 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5mfwj" event={"ID":"536def47-c9d3-4c3e-9b4a-3776e034998b","Type":"ContainerDied","Data":"7082ffe076a02439194564edcc80e68738d58df0d78fd91902ca555947068503"} Dec 02 00:08:05 crc kubenswrapper[4856]: I1202 00:08:05.826218 4856 scope.go:117] "RemoveContainer" containerID="def9ed88e64c2bc9a765dcaeddcfb88d455d21b8b42fab9503f6eea671519129" Dec 02 00:08:05 crc kubenswrapper[4856]: I1202 00:08:05.826709 4856 scope.go:117] "RemoveContainer" containerID="7082ffe076a02439194564edcc80e68738d58df0d78fd91902ca555947068503" Dec 02 00:08:05 crc kubenswrapper[4856]: E1202 00:08:05.826868 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-5mfwj_openshift-multus(536def47-c9d3-4c3e-9b4a-3776e034998b)\"" pod="openshift-multus/multus-5mfwj" podUID="536def47-c9d3-4c3e-9b4a-3776e034998b" Dec 02 00:08:06 crc kubenswrapper[4856]: I1202 00:08:06.252181 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:08:06 crc kubenswrapper[4856]: E1202 00:08:06.252319 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:08:06 crc kubenswrapper[4856]: I1202 00:08:06.252384 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:08:06 crc kubenswrapper[4856]: I1202 00:08:06.252399 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:08:06 crc kubenswrapper[4856]: E1202 00:08:06.252561 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:08:06 crc kubenswrapper[4856]: E1202 00:08:06.252632 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:08:06 crc kubenswrapper[4856]: I1202 00:08:06.832079 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5mfwj_536def47-c9d3-4c3e-9b4a-3776e034998b/kube-multus/1.log" Dec 02 00:08:07 crc kubenswrapper[4856]: I1202 00:08:07.252268 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:08:07 crc kubenswrapper[4856]: E1202 00:08:07.252491 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:08:08 crc kubenswrapper[4856]: I1202 00:08:08.252249 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:08:08 crc kubenswrapper[4856]: I1202 00:08:08.252299 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:08:08 crc kubenswrapper[4856]: I1202 00:08:08.252367 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:08:08 crc kubenswrapper[4856]: E1202 00:08:08.252438 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:08:08 crc kubenswrapper[4856]: E1202 00:08:08.252543 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:08:08 crc kubenswrapper[4856]: E1202 00:08:08.252674 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:08:09 crc kubenswrapper[4856]: I1202 00:08:09.252020 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:08:09 crc kubenswrapper[4856]: E1202 00:08:09.252291 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:08:10 crc kubenswrapper[4856]: I1202 00:08:10.251663 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:08:10 crc kubenswrapper[4856]: I1202 00:08:10.251681 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:08:10 crc kubenswrapper[4856]: E1202 00:08:10.252186 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:08:10 crc kubenswrapper[4856]: E1202 00:08:10.252296 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:08:10 crc kubenswrapper[4856]: I1202 00:08:10.251718 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:08:10 crc kubenswrapper[4856]: E1202 00:08:10.252909 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:08:11 crc kubenswrapper[4856]: I1202 00:08:11.251420 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:08:11 crc kubenswrapper[4856]: E1202 00:08:11.252264 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:08:11 crc kubenswrapper[4856]: I1202 00:08:11.252865 4856 scope.go:117] "RemoveContainer" containerID="35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff" Dec 02 00:08:11 crc kubenswrapper[4856]: E1202 00:08:11.253204 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-l5jg6_openshift-ovn-kubernetes(3051381c-49c8-4217-9831-013ca2931604)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" podUID="3051381c-49c8-4217-9831-013ca2931604" Dec 02 00:08:12 crc kubenswrapper[4856]: I1202 00:08:12.251871 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:08:12 crc kubenswrapper[4856]: I1202 00:08:12.251948 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:08:12 crc kubenswrapper[4856]: I1202 00:08:12.251893 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:08:12 crc kubenswrapper[4856]: E1202 00:08:12.252105 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:08:12 crc kubenswrapper[4856]: E1202 00:08:12.252222 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:08:12 crc kubenswrapper[4856]: E1202 00:08:12.252319 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:08:13 crc kubenswrapper[4856]: I1202 00:08:13.252258 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:08:13 crc kubenswrapper[4856]: E1202 00:08:13.253655 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:08:13 crc kubenswrapper[4856]: E1202 00:08:13.287080 4856 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 02 00:08:13 crc kubenswrapper[4856]: E1202 00:08:13.327952 4856 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 02 00:08:14 crc kubenswrapper[4856]: I1202 00:08:14.251205 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:08:14 crc kubenswrapper[4856]: I1202 00:08:14.251282 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:08:14 crc kubenswrapper[4856]: I1202 00:08:14.251236 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:08:14 crc kubenswrapper[4856]: E1202 00:08:14.251381 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:08:14 crc kubenswrapper[4856]: E1202 00:08:14.251736 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:08:14 crc kubenswrapper[4856]: E1202 00:08:14.251811 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:08:15 crc kubenswrapper[4856]: I1202 00:08:15.251429 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:08:15 crc kubenswrapper[4856]: E1202 00:08:15.251756 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:08:16 crc kubenswrapper[4856]: I1202 00:08:16.251559 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:08:16 crc kubenswrapper[4856]: I1202 00:08:16.251584 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:08:16 crc kubenswrapper[4856]: E1202 00:08:16.251694 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:08:16 crc kubenswrapper[4856]: E1202 00:08:16.251785 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:08:16 crc kubenswrapper[4856]: I1202 00:08:16.252389 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:08:16 crc kubenswrapper[4856]: E1202 00:08:16.252663 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:08:17 crc kubenswrapper[4856]: I1202 00:08:17.251717 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:08:17 crc kubenswrapper[4856]: E1202 00:08:17.251957 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:08:18 crc kubenswrapper[4856]: I1202 00:08:18.251699 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:08:18 crc kubenswrapper[4856]: I1202 00:08:18.251735 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:08:18 crc kubenswrapper[4856]: I1202 00:08:18.251769 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:08:18 crc kubenswrapper[4856]: E1202 00:08:18.252852 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:08:18 crc kubenswrapper[4856]: I1202 00:08:18.252406 4856 scope.go:117] "RemoveContainer" containerID="7082ffe076a02439194564edcc80e68738d58df0d78fd91902ca555947068503" Dec 02 00:08:18 crc kubenswrapper[4856]: E1202 00:08:18.252370 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:08:18 crc kubenswrapper[4856]: E1202 00:08:18.252975 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:08:18 crc kubenswrapper[4856]: E1202 00:08:18.330095 4856 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 02 00:08:18 crc kubenswrapper[4856]: I1202 00:08:18.874503 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5mfwj_536def47-c9d3-4c3e-9b4a-3776e034998b/kube-multus/1.log" Dec 02 00:08:18 crc kubenswrapper[4856]: I1202 00:08:18.874583 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5mfwj" event={"ID":"536def47-c9d3-4c3e-9b4a-3776e034998b","Type":"ContainerStarted","Data":"4fea79bd23bfafff699e40d2d9488ef0f7f8a3f02010dce530663591daa504a7"} Dec 02 00:08:19 crc kubenswrapper[4856]: I1202 00:08:19.252273 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:08:19 crc kubenswrapper[4856]: E1202 00:08:19.252476 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:08:20 crc kubenswrapper[4856]: I1202 00:08:20.251664 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:08:20 crc kubenswrapper[4856]: I1202 00:08:20.251734 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:08:20 crc kubenswrapper[4856]: I1202 00:08:20.251775 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:08:20 crc kubenswrapper[4856]: E1202 00:08:20.251917 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:08:20 crc kubenswrapper[4856]: E1202 00:08:20.252129 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:08:20 crc kubenswrapper[4856]: E1202 00:08:20.252252 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:08:21 crc kubenswrapper[4856]: I1202 00:08:21.251624 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:08:21 crc kubenswrapper[4856]: E1202 00:08:21.252154 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:08:22 crc kubenswrapper[4856]: I1202 00:08:22.251635 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:08:22 crc kubenswrapper[4856]: I1202 00:08:22.251639 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:08:22 crc kubenswrapper[4856]: E1202 00:08:22.251902 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:08:22 crc kubenswrapper[4856]: I1202 00:08:22.251674 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:08:22 crc kubenswrapper[4856]: E1202 00:08:22.252143 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:08:22 crc kubenswrapper[4856]: E1202 00:08:22.252312 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:08:23 crc kubenswrapper[4856]: I1202 00:08:23.251845 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:08:23 crc kubenswrapper[4856]: E1202 00:08:23.253872 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:08:23 crc kubenswrapper[4856]: E1202 00:08:23.330657 4856 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 02 00:08:24 crc kubenswrapper[4856]: I1202 00:08:24.252291 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:08:24 crc kubenswrapper[4856]: I1202 00:08:24.252370 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:08:24 crc kubenswrapper[4856]: E1202 00:08:24.252496 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:08:24 crc kubenswrapper[4856]: E1202 00:08:24.252752 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:08:24 crc kubenswrapper[4856]: I1202 00:08:24.253164 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:08:24 crc kubenswrapper[4856]: E1202 00:08:24.253571 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:08:25 crc kubenswrapper[4856]: I1202 00:08:25.252331 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:08:25 crc kubenswrapper[4856]: E1202 00:08:25.253033 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:08:25 crc kubenswrapper[4856]: I1202 00:08:25.253643 4856 scope.go:117] "RemoveContainer" containerID="35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff" Dec 02 00:08:25 crc kubenswrapper[4856]: I1202 00:08:25.901708 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l5jg6_3051381c-49c8-4217-9831-013ca2931604/ovnkube-controller/3.log" Dec 02 00:08:25 crc kubenswrapper[4856]: I1202 00:08:25.905804 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerStarted","Data":"67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd"} Dec 02 00:08:25 crc kubenswrapper[4856]: I1202 00:08:25.906387 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:08:25 crc kubenswrapper[4856]: I1202 00:08:25.939750 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" podStartSLOduration=112.939723121 podStartE2EDuration="1m52.939723121s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:25.936890419 +0000 UTC m=+132.963258443" watchObservedRunningTime="2025-12-02 00:08:25.939723121 +0000 UTC m=+132.966091135" Dec 02 00:08:26 crc kubenswrapper[4856]: I1202 00:08:26.251189 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:08:26 crc kubenswrapper[4856]: I1202 00:08:26.251233 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:08:26 crc kubenswrapper[4856]: I1202 00:08:26.251328 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:08:26 crc kubenswrapper[4856]: E1202 00:08:26.251416 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:08:26 crc kubenswrapper[4856]: E1202 00:08:26.251475 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:08:26 crc kubenswrapper[4856]: E1202 00:08:26.251692 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:08:26 crc kubenswrapper[4856]: I1202 00:08:26.438093 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-4zvgr"] Dec 02 00:08:26 crc kubenswrapper[4856]: I1202 00:08:26.438207 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:08:26 crc kubenswrapper[4856]: E1202 00:08:26.438363 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:08:28 crc kubenswrapper[4856]: I1202 00:08:28.251297 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:08:28 crc kubenswrapper[4856]: I1202 00:08:28.251325 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:08:28 crc kubenswrapper[4856]: E1202 00:08:28.251838 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:08:28 crc kubenswrapper[4856]: I1202 00:08:28.251325 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:08:28 crc kubenswrapper[4856]: E1202 00:08:28.251906 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:08:28 crc kubenswrapper[4856]: I1202 00:08:28.251359 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:08:28 crc kubenswrapper[4856]: E1202 00:08:28.252084 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:08:28 crc kubenswrapper[4856]: E1202 00:08:28.252243 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:08:28 crc kubenswrapper[4856]: E1202 00:08:28.332639 4856 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 02 00:08:30 crc kubenswrapper[4856]: I1202 00:08:30.252199 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:08:30 crc kubenswrapper[4856]: E1202 00:08:30.252393 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:08:30 crc kubenswrapper[4856]: I1202 00:08:30.252487 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:08:30 crc kubenswrapper[4856]: I1202 00:08:30.252564 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:08:30 crc kubenswrapper[4856]: I1202 00:08:30.252503 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:08:30 crc kubenswrapper[4856]: E1202 00:08:30.252714 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:08:30 crc kubenswrapper[4856]: E1202 00:08:30.252844 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:08:30 crc kubenswrapper[4856]: E1202 00:08:30.252957 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:08:32 crc kubenswrapper[4856]: I1202 00:08:32.251560 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:08:32 crc kubenswrapper[4856]: I1202 00:08:32.251605 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:08:32 crc kubenswrapper[4856]: I1202 00:08:32.251616 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:08:32 crc kubenswrapper[4856]: I1202 00:08:32.251624 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:08:32 crc kubenswrapper[4856]: E1202 00:08:32.251871 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 00:08:32 crc kubenswrapper[4856]: E1202 00:08:32.251980 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 00:08:32 crc kubenswrapper[4856]: E1202 00:08:32.252103 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4zvgr" podUID="cbedea3d-dea3-407d-aae3-2ac725bcab34" Dec 02 00:08:32 crc kubenswrapper[4856]: E1202 00:08:32.252152 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 00:08:34 crc kubenswrapper[4856]: I1202 00:08:34.252235 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:08:34 crc kubenswrapper[4856]: I1202 00:08:34.252863 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:08:34 crc kubenswrapper[4856]: I1202 00:08:34.252931 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:08:34 crc kubenswrapper[4856]: I1202 00:08:34.253019 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:08:34 crc kubenswrapper[4856]: I1202 00:08:34.256741 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 02 00:08:34 crc kubenswrapper[4856]: I1202 00:08:34.257512 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 02 00:08:34 crc kubenswrapper[4856]: I1202 00:08:34.257923 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 02 00:08:34 crc kubenswrapper[4856]: I1202 00:08:34.258586 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 02 00:08:34 crc kubenswrapper[4856]: I1202 00:08:34.259801 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 02 00:08:34 crc kubenswrapper[4856]: I1202 00:08:34.260384 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 02 00:08:35 crc kubenswrapper[4856]: I1202 00:08:35.061814 4856 patch_prober.go:28] interesting pod/machine-config-daemon-455ww container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 00:08:35 crc kubenswrapper[4856]: I1202 00:08:35.061931 4856 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 00:08:37 crc kubenswrapper[4856]: I1202 00:08:37.211509 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.244078 4856 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.304727 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7lsm8"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.305577 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.307520 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.308452 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.310903 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b9s78"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.311777 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b9s78" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.312882 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.312936 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.312981 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.313695 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.313764 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-4mgld"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.314775 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.315024 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.322035 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.323881 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.324123 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.324354 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.324525 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.324693 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.324804 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.324864 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.325471 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.325858 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.329834 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chhj4"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.331042 4856 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chhj4" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.334874 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.337667 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.338359 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-s2rvp"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.338675 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.338871 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-54k5c"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.338967 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.339200 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.339252 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-rkvl8"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.339432 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.339636 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-rkvl8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.339700 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.341463 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-54k5c" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.341783 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-s2rvp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.370037 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.370512 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.386904 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.387163 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.387658 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.387982 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.388116 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.388859 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.389214 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.389559 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.389726 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.390238 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.390419 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.395662 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.396142 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.396618 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.397197 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.397392 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.397873 4856 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-machine-api/machine-api-operator-5694c8668f-4ztk7"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.398296 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-pruner-29410560-9nw7n"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.398539 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-pruner-29410560-9nw7n" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.398786 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-4ztk7" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.399978 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.400136 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.401936 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.402189 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.402191 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.402307 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.402545 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.402570 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.402714 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.404624 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.404745 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.404839 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-t8qsp"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.405072 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.405386 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-pdb4m"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.405458 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.405616 
4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.405798 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-pdb4m" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.405817 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.406087 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-t8qsp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.406412 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.406632 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.406990 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.407705 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.408247 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.409424 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.409623 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"serviceca" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.409822 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"pruner-dockercfg-p7bcw" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.410030 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.416205 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-89qzj"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.416857 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-6zlxx"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.417299 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.417731 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zktnf"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.417801 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-89qzj" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.418233 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zktnf" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.420037 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.420097 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.420455 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.422811 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.423122 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.423248 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.423413 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.423551 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-6phhd"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.424469 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.424691 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.424865 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.425027 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.425177 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.425364 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.427116 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.448280 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.448433 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.464168 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.466287 4856 reflector.go:368] Caches populated for *v1.Secret 
from object-"openshift-console"/"console-oauth-config" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.467365 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.467482 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.467934 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.468341 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.468348 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.468879 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.469267 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.469404 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.469628 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.469848 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.470253 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ct88q"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.470525 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2d7ss"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.470829 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ftzrj"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.471078 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z5psp"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.471327 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7s95v"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.471570 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-wlphq"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.471991 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-wlphq" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.472263 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6phhd" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.472468 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.472660 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ct88q" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.473733 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.474352 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ftzrj" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.475111 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z5psp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.475488 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7s95v" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.475907 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.480847 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.482329 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.482641 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-vstlv"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.483340 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-49wjq"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.483533 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-vstlv" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.483877 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-49wjq" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.484828 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.485201 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.485918 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.486257 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.486344 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.486684 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tj4lm"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.486713 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.486851 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.486893 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.486902 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.487443 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tj4lm" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.489350 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-5phk8"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.494429 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-m8qvw"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.494897 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dp8tx"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.495139 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-fwb5c"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.491162 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.495810 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-5phk8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.495819 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m8qvw" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.495962 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.491712 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.492149 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.496725 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2vztf"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.496849 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6t8k\" (UniqueName: \"kubernetes.io/projected/c6f67f96-77c7-473b-ae34-d0b5926ef9fa-kube-api-access-d6t8k\") pod \"openshift-apiserver-operator-796bbdcf4f-b9s78\" (UID: \"c6f67f96-77c7-473b-ae34-d0b5926ef9fa\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b9s78" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.496889 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szf2h\" (UniqueName: \"kubernetes.io/projected/fa22af34-34f4-4fb8-8512-c5de1c159d63-kube-api-access-szf2h\") pod \"console-operator-58897d9998-s2rvp\" (UID: \"fa22af34-34f4-4fb8-8512-c5de1c159d63\") " pod="openshift-console-operator/console-operator-58897d9998-s2rvp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.496923 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/11c6790a-0083-45a1-955d-af4fe38ac958-etcd-client\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.496966 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.496999 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cdee0db3-d504-4b9e-81d6-97bb134a3271-serving-cert\") pod \"authentication-operator-69f744f599-rkvl8\" (UID: \"cdee0db3-d504-4b9e-81d6-97bb134a3271\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rkvl8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497023 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/34d7d362-ebe4-4476-8f52-646e974fb07d-available-featuregates\") pod \"openshift-config-operator-7777fb866f-54k5c\" (UID: \"34d7d362-ebe4-4476-8f52-646e974fb07d\") " 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-54k5c" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497063 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hd8qf\" (UniqueName: \"kubernetes.io/projected/93046834-963c-4132-a184-d9541f761870-kube-api-access-hd8qf\") pod \"machine-api-operator-5694c8668f-4ztk7\" (UID: \"93046834-963c-4132-a184-d9541f761870\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4ztk7" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497089 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa22af34-34f4-4fb8-8512-c5de1c159d63-config\") pod \"console-operator-58897d9998-s2rvp\" (UID: \"fa22af34-34f4-4fb8-8512-c5de1c159d63\") " pod="openshift-console-operator/console-operator-58897d9998-s2rvp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497111 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6cc6ab38-8ed6-468a-864c-25354ae45707-audit-policies\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497132 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cdee0db3-d504-4b9e-81d6-97bb134a3271-service-ca-bundle\") pod \"authentication-operator-69f744f599-rkvl8\" (UID: \"cdee0db3-d504-4b9e-81d6-97bb134a3271\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rkvl8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497157 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c6f67f96-77c7-473b-ae34-d0b5926ef9fa-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-b9s78\" (UID: \"c6f67f96-77c7-473b-ae34-d0b5926ef9fa\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b9s78" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497197 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497235 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53f12dd9-fe11-47df-9c75-d812a1a80309-serving-cert\") pod \"apiserver-7bbb656c7d-7kxnr\" (UID: \"53f12dd9-fe11-47df-9c75-d812a1a80309\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497256 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/11c6790a-0083-45a1-955d-af4fe38ac958-image-import-ca\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " 
pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497289 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/11c6790a-0083-45a1-955d-af4fe38ac958-trusted-ca-bundle\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497326 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xw8bp\" (UniqueName: \"kubernetes.io/projected/6cc6ab38-8ed6-468a-864c-25354ae45707-kube-api-access-xw8bp\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497350 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skfcm\" (UniqueName: \"kubernetes.io/projected/ab1f3930-5ec8-49ad-844b-a6166d3ec3fb-kube-api-access-skfcm\") pod \"image-pruner-29410560-9nw7n\" (UID: \"ab1f3930-5ec8-49ad-844b-a6166d3ec3fb\") " pod="openshift-image-registry/image-pruner-29410560-9nw7n" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497384 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/53f12dd9-fe11-47df-9c75-d812a1a80309-audit-policies\") pod \"apiserver-7bbb656c7d-7kxnr\" (UID: \"53f12dd9-fe11-47df-9c75-d812a1a80309\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497414 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ksqjk\" (UniqueName: \"kubernetes.io/projected/6cc9abf3-4d0e-49ea-b140-b38a97769d4d-kube-api-access-ksqjk\") pod \"cluster-samples-operator-665b6dd947-chhj4\" (UID: \"6cc9abf3-4d0e-49ea-b140-b38a97769d4d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chhj4" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497454 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497492 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/11c6790a-0083-45a1-955d-af4fe38ac958-audit-dir\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497532 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497557 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/53f12dd9-fe11-47df-9c75-d812a1a80309-audit-dir\") pod \"apiserver-7bbb656c7d-7kxnr\" (UID: \"53f12dd9-fe11-47df-9c75-d812a1a80309\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497640 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497681 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/11c6790a-0083-45a1-955d-af4fe38ac958-etcd-serving-ca\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497713 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/53f12dd9-fe11-47df-9c75-d812a1a80309-etcd-client\") pod \"apiserver-7bbb656c7d-7kxnr\" (UID: \"53f12dd9-fe11-47df-9c75-d812a1a80309\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497734 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6f67f96-77c7-473b-ae34-d0b5926ef9fa-config\") pod \"openshift-apiserver-operator-796bbdcf4f-b9s78\" (UID: \"c6f67f96-77c7-473b-ae34-d0b5926ef9fa\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b9s78" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497763 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497800 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93046834-963c-4132-a184-d9541f761870-config\") pod \"machine-api-operator-5694c8668f-4ztk7\" (UID: \"93046834-963c-4132-a184-d9541f761870\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4ztk7" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497856 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/11c6790a-0083-45a1-955d-af4fe38ac958-encryption-config\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 
00:08:38.497895 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/11c6790a-0083-45a1-955d-af4fe38ac958-serving-cert\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497939 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/34d7d362-ebe4-4476-8f52-646e974fb07d-serving-cert\") pod \"openshift-config-operator-7777fb866f-54k5c\" (UID: \"34d7d362-ebe4-4476-8f52-646e974fb07d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-54k5c" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.497976 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6cc6ab38-8ed6-468a-864c-25354ae45707-audit-dir\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.498021 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.498865 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.498917 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/53f12dd9-fe11-47df-9c75-d812a1a80309-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-7kxnr\" (UID: \"53f12dd9-fe11-47df-9c75-d812a1a80309\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.499005 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.499038 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/ab1f3930-5ec8-49ad-844b-a6166d3ec3fb-serviceca\") pod \"image-pruner-29410560-9nw7n\" (UID: \"ab1f3930-5ec8-49ad-844b-a6166d3ec3fb\") " pod="openshift-image-registry/image-pruner-29410560-9nw7n" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.499073 4856 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/53f12dd9-fe11-47df-9c75-d812a1a80309-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-7kxnr\" (UID: \"53f12dd9-fe11-47df-9c75-d812a1a80309\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.499104 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/93046834-963c-4132-a184-d9541f761870-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-4ztk7\" (UID: \"93046834-963c-4132-a184-d9541f761870\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4ztk7" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.499210 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2z75\" (UniqueName: \"kubernetes.io/projected/34d7d362-ebe4-4476-8f52-646e974fb07d-kube-api-access-k2z75\") pod \"openshift-config-operator-7777fb866f-54k5c\" (UID: \"34d7d362-ebe4-4476-8f52-646e974fb07d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-54k5c" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.499294 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/93046834-963c-4132-a184-d9541f761870-images\") pod \"machine-api-operator-5694c8668f-4ztk7\" (UID: \"93046834-963c-4132-a184-d9541f761870\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4ztk7" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.499349 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fa22af34-34f4-4fb8-8512-c5de1c159d63-serving-cert\") pod \"console-operator-58897d9998-s2rvp\" (UID: \"fa22af34-34f4-4fb8-8512-c5de1c159d63\") " pod="openshift-console-operator/console-operator-58897d9998-s2rvp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.499457 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/6cc9abf3-4d0e-49ea-b140-b38a97769d4d-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-chhj4\" (UID: \"6cc9abf3-4d0e-49ea-b140-b38a97769d4d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chhj4" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.499536 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cdee0db3-d504-4b9e-81d6-97bb134a3271-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-rkvl8\" (UID: \"cdee0db3-d504-4b9e-81d6-97bb134a3271\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rkvl8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.499571 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5xbf\" (UniqueName: \"kubernetes.io/projected/11c6790a-0083-45a1-955d-af4fe38ac958-kube-api-access-r5xbf\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.499642 4856 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/53f12dd9-fe11-47df-9c75-d812a1a80309-encryption-config\") pod \"apiserver-7bbb656c7d-7kxnr\" (UID: \"53f12dd9-fe11-47df-9c75-d812a1a80309\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.499676 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4sg5j\" (UniqueName: \"kubernetes.io/projected/cdee0db3-d504-4b9e-81d6-97bb134a3271-kube-api-access-4sg5j\") pod \"authentication-operator-69f744f599-rkvl8\" (UID: \"cdee0db3-d504-4b9e-81d6-97bb134a3271\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rkvl8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.499728 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/11c6790a-0083-45a1-955d-af4fe38ac958-node-pullsecrets\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.499757 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.499779 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fa22af34-34f4-4fb8-8512-c5de1c159d63-trusted-ca\") pod \"console-operator-58897d9998-s2rvp\" (UID: \"fa22af34-34f4-4fb8-8512-c5de1c159d63\") " pod="openshift-console-operator/console-operator-58897d9998-s2rvp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.499788 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fwb5c" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.499830 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdee0db3-d504-4b9e-81d6-97bb134a3271-config\") pod \"authentication-operator-69f744f599-rkvl8\" (UID: \"cdee0db3-d504-4b9e-81d6-97bb134a3271\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rkvl8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.499855 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.499965 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvdwq\" (UniqueName: \"kubernetes.io/projected/53f12dd9-fe11-47df-9c75-d812a1a80309-kube-api-access-hvdwq\") pod \"apiserver-7bbb656c7d-7kxnr\" (UID: \"53f12dd9-fe11-47df-9c75-d812a1a80309\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.500058 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/11c6790a-0083-45a1-955d-af4fe38ac958-audit\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.500094 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11c6790a-0083-45a1-955d-af4fe38ac958-config\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.503077 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2vztf" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.503503 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzp47"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.511713 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.514347 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2vc5h"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.515512 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzp47" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.517294 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-q58fj"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.518181 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2vc5h" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.519208 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-q58fj" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.522061 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.526797 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dllbd"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.528349 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-z6j5r"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.528668 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.528355 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.529260 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-z6j5r" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.529936 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-49zfd"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.531877 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-rlmv8"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.532036 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-49zfd" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.533143 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-rlmv8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.534882 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410560-wj24r"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.535701 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410560-wj24r" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.536306 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-54k5c"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.537552 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b9s78"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.538747 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-s2rvp"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.539923 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-pruner-29410560-9nw7n"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.541779 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-4mgld"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.544243 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-rkvl8"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.545238 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-sdthq"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.546913 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-s5rmh"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.547033 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-sdthq" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.547667 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ct88q"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.547730 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-s5rmh" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.548203 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.549227 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-4ztk7"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.551790 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7lsm8"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.551812 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-89qzj"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.553180 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-pdb4m"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.554308 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.555486 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-fwb5c"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.557055 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-wlphq"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.558909 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-49wjq"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.560503 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-4r66c"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.561105 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-4r66c" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.561704 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-xls64"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.562767 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-xls64" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.564616 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2d7ss"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.565849 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-m8qvw"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.567000 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-6zlxx"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.567316 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.568168 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zktnf"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.569261 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chhj4"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.570321 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-t8qsp"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.571766 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-z6j5r"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.574641 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-vstlv"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.576206 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z5psp"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.577931 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-6phhd"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.582307 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzp47"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.583363 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ftzrj"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.584381 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tj4lm"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.585466 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-xls64"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.586539 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dp8tx"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.587605 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2vztf"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.588149 4856 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.590559 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7s95v"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.593496 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-sdthq"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.595677 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dllbd"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.600885 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2vc5h"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.601525 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/93046834-963c-4132-a184-d9541f761870-images\") pod \"machine-api-operator-5694c8668f-4ztk7\" (UID: \"93046834-963c-4132-a184-d9541f761870\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4ztk7" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.601607 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f9c7450-45c7-4c5e-8b34-d128ee553a82-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-ct88q\" (UID: \"0f9c7450-45c7-4c5e-8b34-d128ee553a82\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ct88q" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.601651 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/6cc9abf3-4d0e-49ea-b140-b38a97769d4d-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-chhj4\" (UID: \"6cc9abf3-4d0e-49ea-b140-b38a97769d4d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chhj4" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.601701 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cdee0db3-d504-4b9e-81d6-97bb134a3271-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-rkvl8\" (UID: \"cdee0db3-d504-4b9e-81d6-97bb134a3271\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rkvl8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.601735 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5xbf\" (UniqueName: \"kubernetes.io/projected/11c6790a-0083-45a1-955d-af4fe38ac958-kube-api-access-r5xbf\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.602438 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/53f12dd9-fe11-47df-9c75-d812a1a80309-encryption-config\") pod \"apiserver-7bbb656c7d-7kxnr\" (UID: \"53f12dd9-fe11-47df-9c75-d812a1a80309\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.602537 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/eda44bb3-b2c2-468b-984d-88809371a6b7-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-zktnf\" (UID: \"eda44bb3-b2c2-468b-984d-88809371a6b7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zktnf" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.602618 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8lcb\" (UniqueName: \"kubernetes.io/projected/ffb4b3e1-a0b9-40fc-9b6f-f34549a866b3-kube-api-access-t8lcb\") pod \"machine-config-controller-84d6567774-wlphq\" (UID: \"ffb4b3e1-a0b9-40fc-9b6f-f34549a866b3\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-wlphq" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.602652 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-dp8tx\" (UID: \"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.602680 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/71754574-f7cd-4f47-916a-efd57a69e4ad-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-ftzrj\" (UID: \"71754574-f7cd-4f47-916a-efd57a69e4ad\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ftzrj" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.602790 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdee0db3-d504-4b9e-81d6-97bb134a3271-config\") pod \"authentication-operator-69f744f599-rkvl8\" (UID: \"cdee0db3-d504-4b9e-81d6-97bb134a3271\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rkvl8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.602997 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.603650 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdee0db3-d504-4b9e-81d6-97bb134a3271-config\") pod \"authentication-operator-69f744f599-rkvl8\" (UID: \"cdee0db3-d504-4b9e-81d6-97bb134a3271\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rkvl8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.603669 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/93046834-963c-4132-a184-d9541f761870-images\") pod \"machine-api-operator-5694c8668f-4ztk7\" (UID: \"93046834-963c-4132-a184-d9541f761870\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4ztk7" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.603877 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvdwq\" (UniqueName: 
\"kubernetes.io/projected/53f12dd9-fe11-47df-9c75-d812a1a80309-kube-api-access-hvdwq\") pod \"apiserver-7bbb656c7d-7kxnr\" (UID: \"53f12dd9-fe11-47df-9c75-d812a1a80309\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.604079 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/11c6790a-0083-45a1-955d-af4fe38ac958-audit\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.604220 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6tnk\" (UniqueName: \"kubernetes.io/projected/ecbd556c-e5af-4f34-8351-ef9ff3416abe-kube-api-access-s6tnk\") pod \"multus-admission-controller-857f4d67dd-vstlv\" (UID: \"ecbd556c-e5af-4f34-8351-ef9ff3416abe\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-vstlv" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.604345 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/44d156b7-bec9-4cf2-8a71-91c94a4db280-metrics-tls\") pod \"ingress-operator-5b745b69d9-6phhd\" (UID: \"44d156b7-bec9-4cf2-8a71-91c94a4db280\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6phhd" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.604479 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/760efad4-17b6-4a2c-8d36-68a59d2c60be-etcd-client\") pod \"etcd-operator-b45778765-pdb4m\" (UID: \"760efad4-17b6-4a2c-8d36-68a59d2c60be\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pdb4m" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.604618 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4fe59379-8ff6-4c3b-aa26-7e65a11d405b-serving-cert\") pod \"route-controller-manager-6576b87f9c-xph6l\" (UID: \"4fe59379-8ff6-4c3b-aa26-7e65a11d405b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.604732 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/11c6790a-0083-45a1-955d-af4fe38ac958-audit\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.603883 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cdee0db3-d504-4b9e-81d6-97bb134a3271-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-rkvl8\" (UID: \"cdee0db3-d504-4b9e-81d6-97bb134a3271\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rkvl8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.604751 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/760efad4-17b6-4a2c-8d36-68a59d2c60be-serving-cert\") pod \"etcd-operator-b45778765-pdb4m\" (UID: \"760efad4-17b6-4a2c-8d36-68a59d2c60be\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-pdb4m" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605023 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8p4ct\" (UniqueName: \"kubernetes.io/projected/760efad4-17b6-4a2c-8d36-68a59d2c60be-kube-api-access-8p4ct\") pod \"etcd-operator-b45778765-pdb4m\" (UID: \"760efad4-17b6-4a2c-8d36-68a59d2c60be\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pdb4m" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605074 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/11c6790a-0083-45a1-955d-af4fe38ac958-etcd-client\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605095 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhxb6\" (UniqueName: \"kubernetes.io/projected/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-kube-api-access-jhxb6\") pod \"controller-manager-879f6c89f-dp8tx\" (UID: \"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605155 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/34d7d362-ebe4-4476-8f52-646e974fb07d-available-featuregates\") pod \"openshift-config-operator-7777fb866f-54k5c\" (UID: \"34d7d362-ebe4-4476-8f52-646e974fb07d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-54k5c" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605181 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eda44bb3-b2c2-468b-984d-88809371a6b7-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-zktnf\" (UID: \"eda44bb3-b2c2-468b-984d-88809371a6b7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zktnf" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605209 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4fe59379-8ff6-4c3b-aa26-7e65a11d405b-client-ca\") pod \"route-controller-manager-6576b87f9c-xph6l\" (UID: \"4fe59379-8ff6-4c3b-aa26-7e65a11d405b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605245 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/23476e03-4f2c-426e-a180-9ad48a56d758-console-config\") pod \"console-f9d7485db-6zlxx\" (UID: \"23476e03-4f2c-426e-a180-9ad48a56d758\") " pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605307 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6cc6ab38-8ed6-468a-864c-25354ae45707-audit-policies\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc 
kubenswrapper[4856]: I1202 00:08:38.605338 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71754574-f7cd-4f47-916a-efd57a69e4ad-config\") pod \"kube-apiserver-operator-766d6c64bb-ftzrj\" (UID: \"71754574-f7cd-4f47-916a-efd57a69e4ad\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ftzrj" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605377 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c6f67f96-77c7-473b-ae34-d0b5926ef9fa-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-b9s78\" (UID: \"c6f67f96-77c7-473b-ae34-d0b5926ef9fa\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b9s78" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605406 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605439 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/11c6790a-0083-45a1-955d-af4fe38ac958-image-import-ca\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605476 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/819ff29c-f7f5-442f-8eb5-e7ccc25d2219-stats-auth\") pod \"router-default-5444994796-5phk8\" (UID: \"819ff29c-f7f5-442f-8eb5-e7ccc25d2219\") " pod="openshift-ingress/router-default-5444994796-5phk8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605505 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4fe59379-8ff6-4c3b-aa26-7e65a11d405b-config\") pod \"route-controller-manager-6576b87f9c-xph6l\" (UID: \"4fe59379-8ff6-4c3b-aa26-7e65a11d405b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605534 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ksqjk\" (UniqueName: \"kubernetes.io/projected/6cc9abf3-4d0e-49ea-b140-b38a97769d4d-kube-api-access-ksqjk\") pod \"cluster-samples-operator-665b6dd947-chhj4\" (UID: \"6cc9abf3-4d0e-49ea-b140-b38a97769d4d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chhj4" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605562 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9c4c1d11-deb4-4971-bf3e-768b4b30ee6a-metrics-tls\") pod \"dns-operator-744455d44c-49wjq\" (UID: \"9c4c1d11-deb4-4971-bf3e-768b4b30ee6a\") " pod="openshift-dns-operator/dns-operator-744455d44c-49wjq" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605625 4856 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-config\") pod \"controller-manager-879f6c89f-dp8tx\" (UID: \"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605653 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0f9c7450-45c7-4c5e-8b34-d128ee553a82-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-ct88q\" (UID: \"0f9c7450-45c7-4c5e-8b34-d128ee553a82\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ct88q" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605705 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605730 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pg6h\" (UniqueName: \"kubernetes.io/projected/23476e03-4f2c-426e-a180-9ad48a56d758-kube-api-access-5pg6h\") pod \"console-f9d7485db-6zlxx\" (UID: \"23476e03-4f2c-426e-a180-9ad48a56d758\") " pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605758 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/11c6790a-0083-45a1-955d-af4fe38ac958-audit-dir\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605782 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605804 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxtgb\" (UniqueName: \"kubernetes.io/projected/9c4c1d11-deb4-4971-bf3e-768b4b30ee6a-kube-api-access-wxtgb\") pod \"dns-operator-744455d44c-49wjq\" (UID: \"9c4c1d11-deb4-4971-bf3e-768b4b30ee6a\") " pod="openshift-dns-operator/dns-operator-744455d44c-49wjq" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605824 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eda44bb3-b2c2-468b-984d-88809371a6b7-config\") pod \"kube-controller-manager-operator-78b949d7b-zktnf\" (UID: \"eda44bb3-b2c2-468b-984d-88809371a6b7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zktnf" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605855 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ecbd556c-e5af-4f34-8351-ef9ff3416abe-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-vstlv\" (UID: \"ecbd556c-e5af-4f34-8351-ef9ff3416abe\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-vstlv" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605882 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4f7bfbc8-c98c-4600-99b6-ec82ba542c49-images\") pod \"machine-config-operator-74547568cd-m8qvw\" (UID: \"4f7bfbc8-c98c-4600-99b6-ec82ba542c49\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m8qvw" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605904 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f4c09d66-5b48-47b0-9696-4380fcc8edf3-srv-cert\") pod \"catalog-operator-68c6474976-tj4lm\" (UID: \"f4c09d66-5b48-47b0-9696-4380fcc8edf3\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tj4lm" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605928 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ffb4b3e1-a0b9-40fc-9b6f-f34549a866b3-proxy-tls\") pod \"machine-config-controller-84d6567774-wlphq\" (UID: \"ffb4b3e1-a0b9-40fc-9b6f-f34549a866b3\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-wlphq" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605951 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrf2m\" (UniqueName: \"kubernetes.io/projected/0f9c7450-45c7-4c5e-8b34-d128ee553a82-kube-api-access-rrf2m\") pod \"openshift-controller-manager-operator-756b6f6bc6-ct88q\" (UID: \"0f9c7450-45c7-4c5e-8b34-d128ee553a82\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ct88q" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605972 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/819ff29c-f7f5-442f-8eb5-e7ccc25d2219-metrics-certs\") pod \"router-default-5444994796-5phk8\" (UID: \"819ff29c-f7f5-442f-8eb5-e7ccc25d2219\") " pod="openshift-ingress/router-default-5444994796-5phk8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.605997 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ffb4b3e1-a0b9-40fc-9b6f-f34549a866b3-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-wlphq\" (UID: \"ffb4b3e1-a0b9-40fc-9b6f-f34549a866b3\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-wlphq" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.606025 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.606050 4856 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.606071 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/44d156b7-bec9-4cf2-8a71-91c94a4db280-bound-sa-token\") pod \"ingress-operator-5b745b69d9-6phhd\" (UID: \"44d156b7-bec9-4cf2-8a71-91c94a4db280\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6phhd" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.606092 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/23476e03-4f2c-426e-a180-9ad48a56d758-service-ca\") pod \"console-f9d7485db-6zlxx\" (UID: \"23476e03-4f2c-426e-a180-9ad48a56d758\") " pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.606114 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2z75\" (UniqueName: \"kubernetes.io/projected/34d7d362-ebe4-4476-8f52-646e974fb07d-kube-api-access-k2z75\") pod \"openshift-config-operator-7777fb866f-54k5c\" (UID: \"34d7d362-ebe4-4476-8f52-646e974fb07d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-54k5c" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.606133 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/53f12dd9-fe11-47df-9c75-d812a1a80309-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-7kxnr\" (UID: \"53f12dd9-fe11-47df-9c75-d812a1a80309\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.606158 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fa22af34-34f4-4fb8-8512-c5de1c159d63-serving-cert\") pod \"console-operator-58897d9998-s2rvp\" (UID: \"fa22af34-34f4-4fb8-8512-c5de1c159d63\") " pod="openshift-console-operator/console-operator-58897d9998-s2rvp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.606189 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvr66\" (UniqueName: \"kubernetes.io/projected/263d4630-69c0-4f10-a3c6-7f88b7836533-kube-api-access-qvr66\") pod \"cluster-image-registry-operator-dc59b4c8b-t8qsp\" (UID: \"263d4630-69c0-4f10-a3c6-7f88b7836533\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-t8qsp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.606219 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/263d4630-69c0-4f10-a3c6-7f88b7836533-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-t8qsp\" (UID: \"263d4630-69c0-4f10-a3c6-7f88b7836533\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-t8qsp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.606266 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4sg5j\" (UniqueName: 
\"kubernetes.io/projected/cdee0db3-d504-4b9e-81d6-97bb134a3271-kube-api-access-4sg5j\") pod \"authentication-operator-69f744f599-rkvl8\" (UID: \"cdee0db3-d504-4b9e-81d6-97bb134a3271\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rkvl8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.606289 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/263d4630-69c0-4f10-a3c6-7f88b7836533-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-t8qsp\" (UID: \"263d4630-69c0-4f10-a3c6-7f88b7836533\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-t8qsp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.606319 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/819ff29c-f7f5-442f-8eb5-e7ccc25d2219-service-ca-bundle\") pod \"router-default-5444994796-5phk8\" (UID: \"819ff29c-f7f5-442f-8eb5-e7ccc25d2219\") " pod="openshift-ingress/router-default-5444994796-5phk8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.606343 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/11c6790a-0083-45a1-955d-af4fe38ac958-node-pullsecrets\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.606365 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.606392 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fa22af34-34f4-4fb8-8512-c5de1c159d63-trusted-ca\") pod \"console-operator-58897d9998-s2rvp\" (UID: \"fa22af34-34f4-4fb8-8512-c5de1c159d63\") " pod="openshift-console-operator/console-operator-58897d9998-s2rvp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.606436 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5b4c6d3-261d-4477-aac1-67034bf1a503-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-z5psp\" (UID: \"f5b4c6d3-261d-4477-aac1-67034bf1a503\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z5psp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.606461 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/23476e03-4f2c-426e-a180-9ad48a56d758-console-serving-cert\") pod \"console-f9d7485db-6zlxx\" (UID: \"23476e03-4f2c-426e-a180-9ad48a56d758\") " pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.606830 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-rlmv8"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.607078 4856 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/53f12dd9-fe11-47df-9c75-d812a1a80309-encryption-config\") pod \"apiserver-7bbb656c7d-7kxnr\" (UID: \"53f12dd9-fe11-47df-9c75-d812a1a80309\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.607691 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/11c6790a-0083-45a1-955d-af4fe38ac958-audit-dir\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.607978 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/6cc9abf3-4d0e-49ea-b140-b38a97769d4d-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-chhj4\" (UID: \"6cc9abf3-4d0e-49ea-b140-b38a97769d4d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chhj4" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.608170 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.608313 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.608830 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/34d7d362-ebe4-4476-8f52-646e974fb07d-available-featuregates\") pod \"openshift-config-operator-7777fb866f-54k5c\" (UID: \"34d7d362-ebe4-4476-8f52-646e974fb07d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-54k5c" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.608992 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6cc6ab38-8ed6-468a-864c-25354ae45707-audit-policies\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.609045 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-4r66c"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.609944 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.610071 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11c6790a-0083-45a1-955d-af4fe38ac958-config\") pod \"apiserver-76f77b778f-4mgld\" (UID: 
\"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.610116 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6t8k\" (UniqueName: \"kubernetes.io/projected/c6f67f96-77c7-473b-ae34-d0b5926ef9fa-kube-api-access-d6t8k\") pod \"openshift-apiserver-operator-796bbdcf4f-b9s78\" (UID: \"c6f67f96-77c7-473b-ae34-d0b5926ef9fa\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b9s78" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.610156 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szf2h\" (UniqueName: \"kubernetes.io/projected/fa22af34-34f4-4fb8-8512-c5de1c159d63-kube-api-access-szf2h\") pod \"console-operator-58897d9998-s2rvp\" (UID: \"fa22af34-34f4-4fb8-8512-c5de1c159d63\") " pod="openshift-console-operator/console-operator-58897d9998-s2rvp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.610247 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-client-ca\") pod \"controller-manager-879f6c89f-dp8tx\" (UID: \"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.610283 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/760efad4-17b6-4a2c-8d36-68a59d2c60be-config\") pod \"etcd-operator-b45778765-pdb4m\" (UID: \"760efad4-17b6-4a2c-8d36-68a59d2c60be\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pdb4m" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.610287 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/53f12dd9-fe11-47df-9c75-d812a1a80309-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-7kxnr\" (UID: \"53f12dd9-fe11-47df-9c75-d812a1a80309\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.610317 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.610348 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cdee0db3-d504-4b9e-81d6-97bb134a3271-serving-cert\") pod \"authentication-operator-69f744f599-rkvl8\" (UID: \"cdee0db3-d504-4b9e-81d6-97bb134a3271\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rkvl8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.610381 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23476e03-4f2c-426e-a180-9ad48a56d758-trusted-ca-bundle\") pod \"console-f9d7485db-6zlxx\" (UID: \"23476e03-4f2c-426e-a180-9ad48a56d758\") " pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 
00:08:38.610412 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4f7bfbc8-c98c-4600-99b6-ec82ba542c49-proxy-tls\") pod \"machine-config-operator-74547568cd-m8qvw\" (UID: \"4f7bfbc8-c98c-4600-99b6-ec82ba542c49\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m8qvw" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.610442 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hd8qf\" (UniqueName: \"kubernetes.io/projected/93046834-963c-4132-a184-d9541f761870-kube-api-access-hd8qf\") pod \"machine-api-operator-5694c8668f-4ztk7\" (UID: \"93046834-963c-4132-a184-d9541f761870\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4ztk7" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.610473 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa22af34-34f4-4fb8-8512-c5de1c159d63-config\") pod \"console-operator-58897d9998-s2rvp\" (UID: \"fa22af34-34f4-4fb8-8512-c5de1c159d63\") " pod="openshift-console-operator/console-operator-58897d9998-s2rvp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.610504 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cdee0db3-d504-4b9e-81d6-97bb134a3271-service-ca-bundle\") pod \"authentication-operator-69f744f599-rkvl8\" (UID: \"cdee0db3-d504-4b9e-81d6-97bb134a3271\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rkvl8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.610532 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjzgn\" (UniqueName: \"kubernetes.io/projected/f2c1346e-6e8d-4acb-b329-88bf72eaef2b-kube-api-access-hjzgn\") pod \"migrator-59844c95c7-fwb5c\" (UID: \"f2c1346e-6e8d-4acb-b329-88bf72eaef2b\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fwb5c" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.610542 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fa22af34-34f4-4fb8-8512-c5de1c159d63-trusted-ca\") pod \"console-operator-58897d9998-s2rvp\" (UID: \"fa22af34-34f4-4fb8-8512-c5de1c159d63\") " pod="openshift-console-operator/console-operator-58897d9998-s2rvp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.610005 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.611781 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c6f67f96-77c7-473b-ae34-d0b5926ef9fa-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-b9s78\" (UID: \"c6f67f96-77c7-473b-ae34-d0b5926ef9fa\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b9s78" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.612023 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/11c6790a-0083-45a1-955d-af4fe38ac958-config\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.612212 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/11c6790a-0083-45a1-955d-af4fe38ac958-image-import-ca\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.613664 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa22af34-34f4-4fb8-8512-c5de1c159d63-config\") pod \"console-operator-58897d9998-s2rvp\" (UID: \"fa22af34-34f4-4fb8-8512-c5de1c159d63\") " pod="openshift-console-operator/console-operator-58897d9998-s2rvp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.613710 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410560-wj24r"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.613786 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lkv4\" (UniqueName: \"kubernetes.io/projected/f4c09d66-5b48-47b0-9696-4380fcc8edf3-kube-api-access-9lkv4\") pod \"catalog-operator-68c6474976-tj4lm\" (UID: \"f4c09d66-5b48-47b0-9696-4380fcc8edf3\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tj4lm" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.613821 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/11c6790a-0083-45a1-955d-af4fe38ac958-node-pullsecrets\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.613947 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.613949 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/23476e03-4f2c-426e-a180-9ad48a56d758-console-oauth-config\") pod \"console-f9d7485db-6zlxx\" (UID: \"23476e03-4f2c-426e-a180-9ad48a56d758\") " pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.614027 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53f12dd9-fe11-47df-9c75-d812a1a80309-serving-cert\") pod \"apiserver-7bbb656c7d-7kxnr\" (UID: \"53f12dd9-fe11-47df-9c75-d812a1a80309\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.614193 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/263d4630-69c0-4f10-a3c6-7f88b7836533-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-t8qsp\" (UID: \"263d4630-69c0-4f10-a3c6-7f88b7836533\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-t8qsp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.614235 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bt8g9\" (UniqueName: \"kubernetes.io/projected/4fe59379-8ff6-4c3b-aa26-7e65a11d405b-kube-api-access-bt8g9\") pod \"route-controller-manager-6576b87f9c-xph6l\" (UID: \"4fe59379-8ff6-4c3b-aa26-7e65a11d405b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.614320 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/11c6790a-0083-45a1-955d-af4fe38ac958-trusted-ca-bundle\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.614376 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xw8bp\" (UniqueName: \"kubernetes.io/projected/6cc6ab38-8ed6-468a-864c-25354ae45707-kube-api-access-xw8bp\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.614480 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.614487 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cdee0db3-d504-4b9e-81d6-97bb134a3271-service-ca-bundle\") pod \"authentication-operator-69f744f599-rkvl8\" (UID: \"cdee0db3-d504-4b9e-81d6-97bb134a3271\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rkvl8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.614485 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skfcm\" (UniqueName: \"kubernetes.io/projected/ab1f3930-5ec8-49ad-844b-a6166d3ec3fb-kube-api-access-skfcm\") pod \"image-pruner-29410560-9nw7n\" (UID: \"ab1f3930-5ec8-49ad-844b-a6166d3ec3fb\") " pod="openshift-image-registry/image-pruner-29410560-9nw7n" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.614650 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckjfk\" (UniqueName: \"kubernetes.io/projected/4f7bfbc8-c98c-4600-99b6-ec82ba542c49-kube-api-access-ckjfk\") pod \"machine-config-operator-74547568cd-m8qvw\" (UID: \"4f7bfbc8-c98c-4600-99b6-ec82ba542c49\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m8qvw" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.614717 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88qs8\" (UniqueName: 
\"kubernetes.io/projected/819ff29c-f7f5-442f-8eb5-e7ccc25d2219-kube-api-access-88qs8\") pod \"router-default-5444994796-5phk8\" (UID: \"819ff29c-f7f5-442f-8eb5-e7ccc25d2219\") " pod="openshift-ingress/router-default-5444994796-5phk8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.614798 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/53f12dd9-fe11-47df-9c75-d812a1a80309-audit-policies\") pod \"apiserver-7bbb656c7d-7kxnr\" (UID: \"53f12dd9-fe11-47df-9c75-d812a1a80309\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.614865 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-serving-cert\") pod \"controller-manager-879f6c89f-dp8tx\" (UID: \"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.615118 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/11c6790a-0083-45a1-955d-af4fe38ac958-trusted-ca-bundle\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.615474 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.615514 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/53f12dd9-fe11-47df-9c75-d812a1a80309-audit-policies\") pod \"apiserver-7bbb656c7d-7kxnr\" (UID: \"53f12dd9-fe11-47df-9c75-d812a1a80309\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.615779 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.615621 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cdee0db3-d504-4b9e-81d6-97bb134a3271-serving-cert\") pod \"authentication-operator-69f744f599-rkvl8\" (UID: \"cdee0db3-d504-4b9e-81d6-97bb134a3271\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rkvl8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.618272 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fa22af34-34f4-4fb8-8512-c5de1c159d63-serving-cert\") pod \"console-operator-58897d9998-s2rvp\" (UID: \"fa22af34-34f4-4fb8-8512-c5de1c159d63\") " 
pod="openshift-console-operator/console-operator-58897d9998-s2rvp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.615165 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-px954\" (UniqueName: \"kubernetes.io/projected/f5b4c6d3-261d-4477-aac1-67034bf1a503-kube-api-access-px954\") pod \"kube-storage-version-migrator-operator-b67b599dd-z5psp\" (UID: \"f5b4c6d3-261d-4477-aac1-67034bf1a503\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z5psp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.618707 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/760efad4-17b6-4a2c-8d36-68a59d2c60be-etcd-ca\") pod \"etcd-operator-b45778765-pdb4m\" (UID: \"760efad4-17b6-4a2c-8d36-68a59d2c60be\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pdb4m" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.618728 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/819ff29c-f7f5-442f-8eb5-e7ccc25d2219-default-certificate\") pod \"router-default-5444994796-5phk8\" (UID: \"819ff29c-f7f5-442f-8eb5-e7ccc25d2219\") " pod="openshift-ingress/router-default-5444994796-5phk8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.618751 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-q58fj"] Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.618756 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/53f12dd9-fe11-47df-9c75-d812a1a80309-audit-dir\") pod \"apiserver-7bbb656c7d-7kxnr\" (UID: \"53f12dd9-fe11-47df-9c75-d812a1a80309\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.618781 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/53f12dd9-fe11-47df-9c75-d812a1a80309-audit-dir\") pod \"apiserver-7bbb656c7d-7kxnr\" (UID: \"53f12dd9-fe11-47df-9c75-d812a1a80309\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.618807 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5b4c6d3-261d-4477-aac1-67034bf1a503-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-z5psp\" (UID: \"f5b4c6d3-261d-4477-aac1-67034bf1a503\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z5psp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.618829 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckl2j\" (UniqueName: \"kubernetes.io/projected/44d156b7-bec9-4cf2-8a71-91c94a4db280-kube-api-access-ckl2j\") pod \"ingress-operator-5b745b69d9-6phhd\" (UID: \"44d156b7-bec9-4cf2-8a71-91c94a4db280\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6phhd" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.618848 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/71754574-f7cd-4f47-916a-efd57a69e4ad-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-ftzrj\" (UID: \"71754574-f7cd-4f47-916a-efd57a69e4ad\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ftzrj" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.618868 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.618887 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/11c6790a-0083-45a1-955d-af4fe38ac958-etcd-serving-ca\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.618912 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/53f12dd9-fe11-47df-9c75-d812a1a80309-etcd-client\") pod \"apiserver-7bbb656c7d-7kxnr\" (UID: \"53f12dd9-fe11-47df-9c75-d812a1a80309\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.618929 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6f67f96-77c7-473b-ae34-d0b5926ef9fa-config\") pod \"openshift-apiserver-operator-796bbdcf4f-b9s78\" (UID: \"c6f67f96-77c7-473b-ae34-d0b5926ef9fa\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b9s78" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.618952 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.618992 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93046834-963c-4132-a184-d9541f761870-config\") pod \"machine-api-operator-5694c8668f-4ztk7\" (UID: \"93046834-963c-4132-a184-d9541f761870\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4ztk7" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.619011 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/23476e03-4f2c-426e-a180-9ad48a56d758-oauth-serving-cert\") pod \"console-f9d7485db-6zlxx\" (UID: \"23476e03-4f2c-426e-a180-9ad48a56d758\") " pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.619034 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/11c6790a-0083-45a1-955d-af4fe38ac958-encryption-config\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " 
pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.619052 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/11c6790a-0083-45a1-955d-af4fe38ac958-serving-cert\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.619069 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/34d7d362-ebe4-4476-8f52-646e974fb07d-serving-cert\") pod \"openshift-config-operator-7777fb866f-54k5c\" (UID: \"34d7d362-ebe4-4476-8f52-646e974fb07d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-54k5c" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.619087 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6cc6ab38-8ed6-468a-864c-25354ae45707-audit-dir\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.619104 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f4c09d66-5b48-47b0-9696-4380fcc8edf3-profile-collector-cert\") pod \"catalog-operator-68c6474976-tj4lm\" (UID: \"f4c09d66-5b48-47b0-9696-4380fcc8edf3\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tj4lm" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.619122 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbbwr\" (UniqueName: \"kubernetes.io/projected/7ecc82d8-17ea-4639-a4cf-2dea25574574-kube-api-access-nbbwr\") pod \"downloads-7954f5f757-89qzj\" (UID: \"7ecc82d8-17ea-4639-a4cf-2dea25574574\") " pod="openshift-console/downloads-7954f5f757-89qzj" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.619147 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4f7bfbc8-c98c-4600-99b6-ec82ba542c49-auth-proxy-config\") pod \"machine-config-operator-74547568cd-m8qvw\" (UID: \"4f7bfbc8-c98c-4600-99b6-ec82ba542c49\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m8qvw" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.619150 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53f12dd9-fe11-47df-9c75-d812a1a80309-serving-cert\") pod \"apiserver-7bbb656c7d-7kxnr\" (UID: \"53f12dd9-fe11-47df-9c75-d812a1a80309\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.619167 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/760efad4-17b6-4a2c-8d36-68a59d2c60be-etcd-service-ca\") pod \"etcd-operator-b45778765-pdb4m\" (UID: \"760efad4-17b6-4a2c-8d36-68a59d2c60be\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pdb4m" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.619190 4856 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/53f12dd9-fe11-47df-9c75-d812a1a80309-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-7kxnr\" (UID: \"53f12dd9-fe11-47df-9c75-d812a1a80309\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.619209 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/93046834-963c-4132-a184-d9541f761870-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-4ztk7\" (UID: \"93046834-963c-4132-a184-d9541f761870\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4ztk7" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.619227 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/44d156b7-bec9-4cf2-8a71-91c94a4db280-trusted-ca\") pod \"ingress-operator-5b745b69d9-6phhd\" (UID: \"44d156b7-bec9-4cf2-8a71-91c94a4db280\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6phhd" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.619249 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.619269 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/ab1f3930-5ec8-49ad-844b-a6166d3ec3fb-serviceca\") pod \"image-pruner-29410560-9nw7n\" (UID: \"ab1f3930-5ec8-49ad-844b-a6166d3ec3fb\") " pod="openshift-image-registry/image-pruner-29410560-9nw7n" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.619863 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93046834-963c-4132-a184-d9541f761870-config\") pod \"machine-api-operator-5694c8668f-4ztk7\" (UID: \"93046834-963c-4132-a184-d9541f761870\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4ztk7" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.620025 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/ab1f3930-5ec8-49ad-844b-a6166d3ec3fb-serviceca\") pod \"image-pruner-29410560-9nw7n\" (UID: \"ab1f3930-5ec8-49ad-844b-a6166d3ec3fb\") " pod="openshift-image-registry/image-pruner-29410560-9nw7n" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.620321 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.620732 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/11c6790a-0083-45a1-955d-af4fe38ac958-etcd-serving-ca\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " 
pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.621934 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6f67f96-77c7-473b-ae34-d0b5926ef9fa-config\") pod \"openshift-apiserver-operator-796bbdcf4f-b9s78\" (UID: \"c6f67f96-77c7-473b-ae34-d0b5926ef9fa\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b9s78" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.622398 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6cc6ab38-8ed6-468a-864c-25354ae45707-audit-dir\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.622959 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/53f12dd9-fe11-47df-9c75-d812a1a80309-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-7kxnr\" (UID: \"53f12dd9-fe11-47df-9c75-d812a1a80309\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.624065 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.625461 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/11c6790a-0083-45a1-955d-af4fe38ac958-encryption-config\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.625852 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.626516 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/93046834-963c-4132-a184-d9541f761870-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-4ztk7\" (UID: \"93046834-963c-4132-a184-d9541f761870\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4ztk7" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.626739 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/11c6790a-0083-45a1-955d-af4fe38ac958-serving-cert\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.627459 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/11c6790a-0083-45a1-955d-af4fe38ac958-etcd-client\") 
pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.627469 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/53f12dd9-fe11-47df-9c75-d812a1a80309-etcd-client\") pod \"apiserver-7bbb656c7d-7kxnr\" (UID: \"53f12dd9-fe11-47df-9c75-d812a1a80309\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.627555 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.628171 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.628708 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/34d7d362-ebe4-4476-8f52-646e974fb07d-serving-cert\") pod \"openshift-config-operator-7777fb866f-54k5c\" (UID: \"34d7d362-ebe4-4476-8f52-646e974fb07d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-54k5c" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.649860 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.674356 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.695503 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.707665 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720218 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bt8g9\" (UniqueName: \"kubernetes.io/projected/4fe59379-8ff6-4c3b-aa26-7e65a11d405b-kube-api-access-bt8g9\") pod \"route-controller-manager-6576b87f9c-xph6l\" (UID: \"4fe59379-8ff6-4c3b-aa26-7e65a11d405b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720255 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/263d4630-69c0-4f10-a3c6-7f88b7836533-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-t8qsp\" (UID: \"263d4630-69c0-4f10-a3c6-7f88b7836533\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-t8qsp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720293 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckjfk\" (UniqueName: \"kubernetes.io/projected/4f7bfbc8-c98c-4600-99b6-ec82ba542c49-kube-api-access-ckjfk\") pod 
\"machine-config-operator-74547568cd-m8qvw\" (UID: \"4f7bfbc8-c98c-4600-99b6-ec82ba542c49\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m8qvw" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720330 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88qs8\" (UniqueName: \"kubernetes.io/projected/819ff29c-f7f5-442f-8eb5-e7ccc25d2219-kube-api-access-88qs8\") pod \"router-default-5444994796-5phk8\" (UID: \"819ff29c-f7f5-442f-8eb5-e7ccc25d2219\") " pod="openshift-ingress/router-default-5444994796-5phk8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720352 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-serving-cert\") pod \"controller-manager-879f6c89f-dp8tx\" (UID: \"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720369 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-px954\" (UniqueName: \"kubernetes.io/projected/f5b4c6d3-261d-4477-aac1-67034bf1a503-kube-api-access-px954\") pod \"kube-storage-version-migrator-operator-b67b599dd-z5psp\" (UID: \"f5b4c6d3-261d-4477-aac1-67034bf1a503\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z5psp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720403 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/760efad4-17b6-4a2c-8d36-68a59d2c60be-etcd-ca\") pod \"etcd-operator-b45778765-pdb4m\" (UID: \"760efad4-17b6-4a2c-8d36-68a59d2c60be\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pdb4m" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720422 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/819ff29c-f7f5-442f-8eb5-e7ccc25d2219-default-certificate\") pod \"router-default-5444994796-5phk8\" (UID: \"819ff29c-f7f5-442f-8eb5-e7ccc25d2219\") " pod="openshift-ingress/router-default-5444994796-5phk8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720440 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckl2j\" (UniqueName: \"kubernetes.io/projected/44d156b7-bec9-4cf2-8a71-91c94a4db280-kube-api-access-ckl2j\") pod \"ingress-operator-5b745b69d9-6phhd\" (UID: \"44d156b7-bec9-4cf2-8a71-91c94a4db280\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6phhd" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720457 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/71754574-f7cd-4f47-916a-efd57a69e4ad-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-ftzrj\" (UID: \"71754574-f7cd-4f47-916a-efd57a69e4ad\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ftzrj" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720474 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5b4c6d3-261d-4477-aac1-67034bf1a503-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-z5psp\" (UID: \"f5b4c6d3-261d-4477-aac1-67034bf1a503\") " 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z5psp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720499 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/23476e03-4f2c-426e-a180-9ad48a56d758-oauth-serving-cert\") pod \"console-f9d7485db-6zlxx\" (UID: \"23476e03-4f2c-426e-a180-9ad48a56d758\") " pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720528 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f4c09d66-5b48-47b0-9696-4380fcc8edf3-profile-collector-cert\") pod \"catalog-operator-68c6474976-tj4lm\" (UID: \"f4c09d66-5b48-47b0-9696-4380fcc8edf3\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tj4lm" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720547 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbbwr\" (UniqueName: \"kubernetes.io/projected/7ecc82d8-17ea-4639-a4cf-2dea25574574-kube-api-access-nbbwr\") pod \"downloads-7954f5f757-89qzj\" (UID: \"7ecc82d8-17ea-4639-a4cf-2dea25574574\") " pod="openshift-console/downloads-7954f5f757-89qzj" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720565 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4f7bfbc8-c98c-4600-99b6-ec82ba542c49-auth-proxy-config\") pod \"machine-config-operator-74547568cd-m8qvw\" (UID: \"4f7bfbc8-c98c-4600-99b6-ec82ba542c49\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m8qvw" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720583 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/760efad4-17b6-4a2c-8d36-68a59d2c60be-etcd-service-ca\") pod \"etcd-operator-b45778765-pdb4m\" (UID: \"760efad4-17b6-4a2c-8d36-68a59d2c60be\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pdb4m" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720622 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/44d156b7-bec9-4cf2-8a71-91c94a4db280-trusted-ca\") pod \"ingress-operator-5b745b69d9-6phhd\" (UID: \"44d156b7-bec9-4cf2-8a71-91c94a4db280\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6phhd" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720646 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f9c7450-45c7-4c5e-8b34-d128ee553a82-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-ct88q\" (UID: \"0f9c7450-45c7-4c5e-8b34-d128ee553a82\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ct88q" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720687 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eda44bb3-b2c2-468b-984d-88809371a6b7-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-zktnf\" (UID: \"eda44bb3-b2c2-468b-984d-88809371a6b7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zktnf" Dec 02 00:08:38 crc 
kubenswrapper[4856]: I1202 00:08:38.720708 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8lcb\" (UniqueName: \"kubernetes.io/projected/ffb4b3e1-a0b9-40fc-9b6f-f34549a866b3-kube-api-access-t8lcb\") pod \"machine-config-controller-84d6567774-wlphq\" (UID: \"ffb4b3e1-a0b9-40fc-9b6f-f34549a866b3\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-wlphq" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720726 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-dp8tx\" (UID: \"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720744 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/71754574-f7cd-4f47-916a-efd57a69e4ad-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-ftzrj\" (UID: \"71754574-f7cd-4f47-916a-efd57a69e4ad\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ftzrj" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720770 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6tnk\" (UniqueName: \"kubernetes.io/projected/ecbd556c-e5af-4f34-8351-ef9ff3416abe-kube-api-access-s6tnk\") pod \"multus-admission-controller-857f4d67dd-vstlv\" (UID: \"ecbd556c-e5af-4f34-8351-ef9ff3416abe\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-vstlv" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720786 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/44d156b7-bec9-4cf2-8a71-91c94a4db280-metrics-tls\") pod \"ingress-operator-5b745b69d9-6phhd\" (UID: \"44d156b7-bec9-4cf2-8a71-91c94a4db280\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6phhd" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720801 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/760efad4-17b6-4a2c-8d36-68a59d2c60be-etcd-client\") pod \"etcd-operator-b45778765-pdb4m\" (UID: \"760efad4-17b6-4a2c-8d36-68a59d2c60be\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pdb4m" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720818 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4fe59379-8ff6-4c3b-aa26-7e65a11d405b-serving-cert\") pod \"route-controller-manager-6576b87f9c-xph6l\" (UID: \"4fe59379-8ff6-4c3b-aa26-7e65a11d405b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720834 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8p4ct\" (UniqueName: \"kubernetes.io/projected/760efad4-17b6-4a2c-8d36-68a59d2c60be-kube-api-access-8p4ct\") pod \"etcd-operator-b45778765-pdb4m\" (UID: \"760efad4-17b6-4a2c-8d36-68a59d2c60be\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pdb4m" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720849 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/760efad4-17b6-4a2c-8d36-68a59d2c60be-serving-cert\") pod \"etcd-operator-b45778765-pdb4m\" (UID: \"760efad4-17b6-4a2c-8d36-68a59d2c60be\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pdb4m" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720868 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhxb6\" (UniqueName: \"kubernetes.io/projected/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-kube-api-access-jhxb6\") pod \"controller-manager-879f6c89f-dp8tx\" (UID: \"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720885 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eda44bb3-b2c2-468b-984d-88809371a6b7-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-zktnf\" (UID: \"eda44bb3-b2c2-468b-984d-88809371a6b7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zktnf" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720903 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4fe59379-8ff6-4c3b-aa26-7e65a11d405b-client-ca\") pod \"route-controller-manager-6576b87f9c-xph6l\" (UID: \"4fe59379-8ff6-4c3b-aa26-7e65a11d405b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720919 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/23476e03-4f2c-426e-a180-9ad48a56d758-console-config\") pod \"console-f9d7485db-6zlxx\" (UID: \"23476e03-4f2c-426e-a180-9ad48a56d758\") " pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720934 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71754574-f7cd-4f47-916a-efd57a69e4ad-config\") pod \"kube-apiserver-operator-766d6c64bb-ftzrj\" (UID: \"71754574-f7cd-4f47-916a-efd57a69e4ad\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ftzrj" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720953 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/819ff29c-f7f5-442f-8eb5-e7ccc25d2219-stats-auth\") pod \"router-default-5444994796-5phk8\" (UID: \"819ff29c-f7f5-442f-8eb5-e7ccc25d2219\") " pod="openshift-ingress/router-default-5444994796-5phk8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720970 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4fe59379-8ff6-4c3b-aa26-7e65a11d405b-config\") pod \"route-controller-manager-6576b87f9c-xph6l\" (UID: \"4fe59379-8ff6-4c3b-aa26-7e65a11d405b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.720993 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9c4c1d11-deb4-4971-bf3e-768b4b30ee6a-metrics-tls\") pod \"dns-operator-744455d44c-49wjq\" (UID: \"9c4c1d11-deb4-4971-bf3e-768b4b30ee6a\") " 
pod="openshift-dns-operator/dns-operator-744455d44c-49wjq" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721012 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-config\") pod \"controller-manager-879f6c89f-dp8tx\" (UID: \"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721027 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0f9c7450-45c7-4c5e-8b34-d128ee553a82-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-ct88q\" (UID: \"0f9c7450-45c7-4c5e-8b34-d128ee553a82\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ct88q" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721047 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pg6h\" (UniqueName: \"kubernetes.io/projected/23476e03-4f2c-426e-a180-9ad48a56d758-kube-api-access-5pg6h\") pod \"console-f9d7485db-6zlxx\" (UID: \"23476e03-4f2c-426e-a180-9ad48a56d758\") " pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721063 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxtgb\" (UniqueName: \"kubernetes.io/projected/9c4c1d11-deb4-4971-bf3e-768b4b30ee6a-kube-api-access-wxtgb\") pod \"dns-operator-744455d44c-49wjq\" (UID: \"9c4c1d11-deb4-4971-bf3e-768b4b30ee6a\") " pod="openshift-dns-operator/dns-operator-744455d44c-49wjq" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721082 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eda44bb3-b2c2-468b-984d-88809371a6b7-config\") pod \"kube-controller-manager-operator-78b949d7b-zktnf\" (UID: \"eda44bb3-b2c2-468b-984d-88809371a6b7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zktnf" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721099 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ecbd556c-e5af-4f34-8351-ef9ff3416abe-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-vstlv\" (UID: \"ecbd556c-e5af-4f34-8351-ef9ff3416abe\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-vstlv" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721115 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4f7bfbc8-c98c-4600-99b6-ec82ba542c49-images\") pod \"machine-config-operator-74547568cd-m8qvw\" (UID: \"4f7bfbc8-c98c-4600-99b6-ec82ba542c49\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m8qvw" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721147 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ffb4b3e1-a0b9-40fc-9b6f-f34549a866b3-proxy-tls\") pod \"machine-config-controller-84d6567774-wlphq\" (UID: \"ffb4b3e1-a0b9-40fc-9b6f-f34549a866b3\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-wlphq" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721164 4856 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f4c09d66-5b48-47b0-9696-4380fcc8edf3-srv-cert\") pod \"catalog-operator-68c6474976-tj4lm\" (UID: \"f4c09d66-5b48-47b0-9696-4380fcc8edf3\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tj4lm" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721211 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrf2m\" (UniqueName: \"kubernetes.io/projected/0f9c7450-45c7-4c5e-8b34-d128ee553a82-kube-api-access-rrf2m\") pod \"openshift-controller-manager-operator-756b6f6bc6-ct88q\" (UID: \"0f9c7450-45c7-4c5e-8b34-d128ee553a82\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ct88q" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721227 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/819ff29c-f7f5-442f-8eb5-e7ccc25d2219-metrics-certs\") pod \"router-default-5444994796-5phk8\" (UID: \"819ff29c-f7f5-442f-8eb5-e7ccc25d2219\") " pod="openshift-ingress/router-default-5444994796-5phk8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721249 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ffb4b3e1-a0b9-40fc-9b6f-f34549a866b3-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-wlphq\" (UID: \"ffb4b3e1-a0b9-40fc-9b6f-f34549a866b3\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-wlphq" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721274 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/44d156b7-bec9-4cf2-8a71-91c94a4db280-bound-sa-token\") pod \"ingress-operator-5b745b69d9-6phhd\" (UID: \"44d156b7-bec9-4cf2-8a71-91c94a4db280\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6phhd" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721294 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/23476e03-4f2c-426e-a180-9ad48a56d758-service-ca\") pod \"console-f9d7485db-6zlxx\" (UID: \"23476e03-4f2c-426e-a180-9ad48a56d758\") " pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721312 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvr66\" (UniqueName: \"kubernetes.io/projected/263d4630-69c0-4f10-a3c6-7f88b7836533-kube-api-access-qvr66\") pod \"cluster-image-registry-operator-dc59b4c8b-t8qsp\" (UID: \"263d4630-69c0-4f10-a3c6-7f88b7836533\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-t8qsp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721354 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/263d4630-69c0-4f10-a3c6-7f88b7836533-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-t8qsp\" (UID: \"263d4630-69c0-4f10-a3c6-7f88b7836533\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-t8qsp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721384 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/263d4630-69c0-4f10-a3c6-7f88b7836533-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-t8qsp\" (UID: \"263d4630-69c0-4f10-a3c6-7f88b7836533\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-t8qsp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721403 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/819ff29c-f7f5-442f-8eb5-e7ccc25d2219-service-ca-bundle\") pod \"router-default-5444994796-5phk8\" (UID: \"819ff29c-f7f5-442f-8eb5-e7ccc25d2219\") " pod="openshift-ingress/router-default-5444994796-5phk8" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721423 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5b4c6d3-261d-4477-aac1-67034bf1a503-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-z5psp\" (UID: \"f5b4c6d3-261d-4477-aac1-67034bf1a503\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z5psp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721440 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/23476e03-4f2c-426e-a180-9ad48a56d758-console-serving-cert\") pod \"console-f9d7485db-6zlxx\" (UID: \"23476e03-4f2c-426e-a180-9ad48a56d758\") " pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721467 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-client-ca\") pod \"controller-manager-879f6c89f-dp8tx\" (UID: \"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721483 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/760efad4-17b6-4a2c-8d36-68a59d2c60be-config\") pod \"etcd-operator-b45778765-pdb4m\" (UID: \"760efad4-17b6-4a2c-8d36-68a59d2c60be\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pdb4m" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721499 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4f7bfbc8-c98c-4600-99b6-ec82ba542c49-proxy-tls\") pod \"machine-config-operator-74547568cd-m8qvw\" (UID: \"4f7bfbc8-c98c-4600-99b6-ec82ba542c49\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m8qvw" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721514 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23476e03-4f2c-426e-a180-9ad48a56d758-trusted-ca-bundle\") pod \"console-f9d7485db-6zlxx\" (UID: \"23476e03-4f2c-426e-a180-9ad48a56d758\") " pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721536 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lkv4\" (UniqueName: \"kubernetes.io/projected/f4c09d66-5b48-47b0-9696-4380fcc8edf3-kube-api-access-9lkv4\") pod \"catalog-operator-68c6474976-tj4lm\" (UID: \"f4c09d66-5b48-47b0-9696-4380fcc8edf3\") " 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tj4lm" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721551 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/23476e03-4f2c-426e-a180-9ad48a56d758-console-oauth-config\") pod \"console-f9d7485db-6zlxx\" (UID: \"23476e03-4f2c-426e-a180-9ad48a56d758\") " pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721568 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjzgn\" (UniqueName: \"kubernetes.io/projected/f2c1346e-6e8d-4acb-b329-88bf72eaef2b-kube-api-access-hjzgn\") pod \"migrator-59844c95c7-fwb5c\" (UID: \"f2c1346e-6e8d-4acb-b329-88bf72eaef2b\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fwb5c" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721568 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/23476e03-4f2c-426e-a180-9ad48a56d758-oauth-serving-cert\") pod \"console-f9d7485db-6zlxx\" (UID: \"23476e03-4f2c-426e-a180-9ad48a56d758\") " pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721731 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/760efad4-17b6-4a2c-8d36-68a59d2c60be-etcd-service-ca\") pod \"etcd-operator-b45778765-pdb4m\" (UID: \"760efad4-17b6-4a2c-8d36-68a59d2c60be\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pdb4m" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.721814 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71754574-f7cd-4f47-916a-efd57a69e4ad-config\") pod \"kube-apiserver-operator-766d6c64bb-ftzrj\" (UID: \"71754574-f7cd-4f47-916a-efd57a69e4ad\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ftzrj" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.722567 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eda44bb3-b2c2-468b-984d-88809371a6b7-config\") pod \"kube-controller-manager-operator-78b949d7b-zktnf\" (UID: \"eda44bb3-b2c2-468b-984d-88809371a6b7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zktnf" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.722644 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4f7bfbc8-c98c-4600-99b6-ec82ba542c49-auth-proxy-config\") pod \"machine-config-operator-74547568cd-m8qvw\" (UID: \"4f7bfbc8-c98c-4600-99b6-ec82ba542c49\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m8qvw" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.723019 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/760efad4-17b6-4a2c-8d36-68a59d2c60be-etcd-ca\") pod \"etcd-operator-b45778765-pdb4m\" (UID: \"760efad4-17b6-4a2c-8d36-68a59d2c60be\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pdb4m" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.723331 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/263d4630-69c0-4f10-a3c6-7f88b7836533-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-t8qsp\" (UID: \"263d4630-69c0-4f10-a3c6-7f88b7836533\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-t8qsp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.723425 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4fe59379-8ff6-4c3b-aa26-7e65a11d405b-client-ca\") pod \"route-controller-manager-6576b87f9c-xph6l\" (UID: \"4fe59379-8ff6-4c3b-aa26-7e65a11d405b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.723660 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4fe59379-8ff6-4c3b-aa26-7e65a11d405b-config\") pod \"route-controller-manager-6576b87f9c-xph6l\" (UID: \"4fe59379-8ff6-4c3b-aa26-7e65a11d405b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.724205 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/44d156b7-bec9-4cf2-8a71-91c94a4db280-trusted-ca\") pod \"ingress-operator-5b745b69d9-6phhd\" (UID: \"44d156b7-bec9-4cf2-8a71-91c94a4db280\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6phhd" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.724235 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5b4c6d3-261d-4477-aac1-67034bf1a503-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-z5psp\" (UID: \"f5b4c6d3-261d-4477-aac1-67034bf1a503\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z5psp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.724232 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/23476e03-4f2c-426e-a180-9ad48a56d758-console-config\") pod \"console-f9d7485db-6zlxx\" (UID: \"23476e03-4f2c-426e-a180-9ad48a56d758\") " pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.724651 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/44d156b7-bec9-4cf2-8a71-91c94a4db280-metrics-tls\") pod \"ingress-operator-5b745b69d9-6phhd\" (UID: \"44d156b7-bec9-4cf2-8a71-91c94a4db280\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6phhd" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.724831 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f9c7450-45c7-4c5e-8b34-d128ee553a82-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-ct88q\" (UID: \"0f9c7450-45c7-4c5e-8b34-d128ee553a82\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ct88q" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.724915 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ffb4b3e1-a0b9-40fc-9b6f-f34549a866b3-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-wlphq\" (UID: \"ffb4b3e1-a0b9-40fc-9b6f-f34549a866b3\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-wlphq" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.725007 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/760efad4-17b6-4a2c-8d36-68a59d2c60be-config\") pod \"etcd-operator-b45778765-pdb4m\" (UID: \"760efad4-17b6-4a2c-8d36-68a59d2c60be\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pdb4m" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.725157 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/760efad4-17b6-4a2c-8d36-68a59d2c60be-serving-cert\") pod \"etcd-operator-b45778765-pdb4m\" (UID: \"760efad4-17b6-4a2c-8d36-68a59d2c60be\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pdb4m" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.725276 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0f9c7450-45c7-4c5e-8b34-d128ee553a82-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-ct88q\" (UID: \"0f9c7450-45c7-4c5e-8b34-d128ee553a82\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ct88q" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.725847 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/71754574-f7cd-4f47-916a-efd57a69e4ad-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-ftzrj\" (UID: \"71754574-f7cd-4f47-916a-efd57a69e4ad\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ftzrj" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.725957 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/23476e03-4f2c-426e-a180-9ad48a56d758-service-ca\") pod \"console-f9d7485db-6zlxx\" (UID: \"23476e03-4f2c-426e-a180-9ad48a56d758\") " pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.726097 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ffb4b3e1-a0b9-40fc-9b6f-f34549a866b3-proxy-tls\") pod \"machine-config-controller-84d6567774-wlphq\" (UID: \"ffb4b3e1-a0b9-40fc-9b6f-f34549a866b3\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-wlphq" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.726256 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23476e03-4f2c-426e-a180-9ad48a56d758-trusted-ca-bundle\") pod \"console-f9d7485db-6zlxx\" (UID: \"23476e03-4f2c-426e-a180-9ad48a56d758\") " pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.727773 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.727869 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/23476e03-4f2c-426e-a180-9ad48a56d758-console-oauth-config\") pod \"console-f9d7485db-6zlxx\" (UID: \"23476e03-4f2c-426e-a180-9ad48a56d758\") " pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.728020 
4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/23476e03-4f2c-426e-a180-9ad48a56d758-console-serving-cert\") pod \"console-f9d7485db-6zlxx\" (UID: \"23476e03-4f2c-426e-a180-9ad48a56d758\") " pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.728018 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/760efad4-17b6-4a2c-8d36-68a59d2c60be-etcd-client\") pod \"etcd-operator-b45778765-pdb4m\" (UID: \"760efad4-17b6-4a2c-8d36-68a59d2c60be\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pdb4m" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.728375 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eda44bb3-b2c2-468b-984d-88809371a6b7-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-zktnf\" (UID: \"eda44bb3-b2c2-468b-984d-88809371a6b7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zktnf" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.728814 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4fe59379-8ff6-4c3b-aa26-7e65a11d405b-serving-cert\") pod \"route-controller-manager-6576b87f9c-xph6l\" (UID: \"4fe59379-8ff6-4c3b-aa26-7e65a11d405b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.740191 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/263d4630-69c0-4f10-a3c6-7f88b7836533-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-t8qsp\" (UID: \"263d4630-69c0-4f10-a3c6-7f88b7836533\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-t8qsp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.748228 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.767569 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.801250 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.806149 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5b4c6d3-261d-4477-aac1-67034bf1a503-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-z5psp\" (UID: \"f5b4c6d3-261d-4477-aac1-67034bf1a503\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z5psp" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.807917 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.828448 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 02 00:08:38 crc 
kubenswrapper[4856]: I1202 00:08:38.848249 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.868997 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.889144 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.908669 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.927230 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.968196 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 02 00:08:38 crc kubenswrapper[4856]: I1202 00:08:38.987852 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.000142 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ecbd556c-e5af-4f34-8351-ef9ff3416abe-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-vstlv\" (UID: \"ecbd556c-e5af-4f34-8351-ef9ff3416abe\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-vstlv" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.009572 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.028253 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.038519 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9c4c1d11-deb4-4971-bf3e-768b4b30ee6a-metrics-tls\") pod \"dns-operator-744455d44c-49wjq\" (UID: \"9c4c1d11-deb4-4971-bf3e-768b4b30ee6a\") " pod="openshift-dns-operator/dns-operator-744455d44c-49wjq" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.049253 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.068993 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.089581 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.095095 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f4c09d66-5b48-47b0-9696-4380fcc8edf3-profile-collector-cert\") pod \"catalog-operator-68c6474976-tj4lm\" (UID: \"f4c09d66-5b48-47b0-9696-4380fcc8edf3\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tj4lm" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 
00:08:39.109237 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.117349 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f4c09d66-5b48-47b0-9696-4380fcc8edf3-srv-cert\") pod \"catalog-operator-68c6474976-tj4lm\" (UID: \"f4c09d66-5b48-47b0-9696-4380fcc8edf3\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tj4lm" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.129033 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.148025 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.169244 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.188757 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.194563 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4f7bfbc8-c98c-4600-99b6-ec82ba542c49-images\") pod \"machine-config-operator-74547568cd-m8qvw\" (UID: \"4f7bfbc8-c98c-4600-99b6-ec82ba542c49\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m8qvw" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.208487 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.228496 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.238583 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/819ff29c-f7f5-442f-8eb5-e7ccc25d2219-default-certificate\") pod \"router-default-5444994796-5phk8\" (UID: \"819ff29c-f7f5-442f-8eb5-e7ccc25d2219\") " pod="openshift-ingress/router-default-5444994796-5phk8" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.248497 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.268983 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.279656 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/819ff29c-f7f5-442f-8eb5-e7ccc25d2219-stats-auth\") pod \"router-default-5444994796-5phk8\" (UID: \"819ff29c-f7f5-442f-8eb5-e7ccc25d2219\") " pod="openshift-ingress/router-default-5444994796-5phk8" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.289338 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.309501 
4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.317651 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4f7bfbc8-c98c-4600-99b6-ec82ba542c49-proxy-tls\") pod \"machine-config-operator-74547568cd-m8qvw\" (UID: \"4f7bfbc8-c98c-4600-99b6-ec82ba542c49\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m8qvw" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.328278 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.335536 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-serving-cert\") pod \"controller-manager-879f6c89f-dp8tx\" (UID: \"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.348864 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.359742 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/819ff29c-f7f5-442f-8eb5-e7ccc25d2219-metrics-certs\") pod \"router-default-5444994796-5phk8\" (UID: \"819ff29c-f7f5-442f-8eb5-e7ccc25d2219\") " pod="openshift-ingress/router-default-5444994796-5phk8" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.369203 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.388279 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.396336 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-config\") pod \"controller-manager-879f6c89f-dp8tx\" (UID: \"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.409242 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.428565 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.434987 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-client-ca\") pod \"controller-manager-879f6c89f-dp8tx\" (UID: \"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.448775 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.454259 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" 
(UniqueName: \"kubernetes.io/configmap/819ff29c-f7f5-442f-8eb5-e7ccc25d2219-service-ca-bundle\") pod \"router-default-5444994796-5phk8\" (UID: \"819ff29c-f7f5-442f-8eb5-e7ccc25d2219\") " pod="openshift-ingress/router-default-5444994796-5phk8" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.468527 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.497748 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.504132 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-dp8tx\" (UID: \"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.506658 4856 request.go:700] Waited for 1.010164402s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0 Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.508627 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.528894 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.548075 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.567879 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.608030 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.628157 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.648130 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.667864 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.688140 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.707879 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.727629 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 02 
00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.747208 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.779847 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.788908 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.807426 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.828530 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.849507 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.868718 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.888269 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.908575 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.928777 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.948724 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.968576 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 02 00:08:39 crc kubenswrapper[4856]: I1202 00:08:39.988853 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.007619 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.028666 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.048677 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.068274 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.088134 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.108509 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 02 00:08:40 crc 
kubenswrapper[4856]: I1202 00:08:40.128437 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.146685 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:40 crc kubenswrapper[4856]: E1202 00:08:40.147030 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:10:42.146990956 +0000 UTC m=+269.173359000 (durationBeforeRetry 2m2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.147428 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.148319 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.148656 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.168963 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.188401 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.207878 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.228329 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.248220 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.248445 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: 
\"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.248545 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.248940 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.254019 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.254279 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.256016 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.268929 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.281402 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.288578 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.301891 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.309016 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.318015 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.328469 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.349156 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.368803 4856 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.389633 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.411957 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.447634 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5xbf\" (UniqueName: \"kubernetes.io/projected/11c6790a-0083-45a1-955d-af4fe38ac958-kube-api-access-r5xbf\") pod \"apiserver-76f77b778f-4mgld\" (UID: \"11c6790a-0083-45a1-955d-af4fe38ac958\") " pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.470342 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvdwq\" (UniqueName: \"kubernetes.io/projected/53f12dd9-fe11-47df-9c75-d812a1a80309-kube-api-access-hvdwq\") pod \"apiserver-7bbb656c7d-7kxnr\" (UID: \"53f12dd9-fe11-47df-9c75-d812a1a80309\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.481202 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4sg5j\" (UniqueName: \"kubernetes.io/projected/cdee0db3-d504-4b9e-81d6-97bb134a3271-kube-api-access-4sg5j\") pod \"authentication-operator-69f744f599-rkvl8\" (UID: \"cdee0db3-d504-4b9e-81d6-97bb134a3271\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rkvl8" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.503034 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ksqjk\" (UniqueName: \"kubernetes.io/projected/6cc9abf3-4d0e-49ea-b140-b38a97769d4d-kube-api-access-ksqjk\") pod \"cluster-samples-operator-665b6dd947-chhj4\" (UID: \"6cc9abf3-4d0e-49ea-b140-b38a97769d4d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chhj4" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.507993 4856 request.go:700] Waited for 1.898177989s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-config-operator/serviceaccounts/openshift-config-operator/token Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.523922 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.526834 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2z75\" (UniqueName: \"kubernetes.io/projected/34d7d362-ebe4-4476-8f52-646e974fb07d-kube-api-access-k2z75\") pod \"openshift-config-operator-7777fb866f-54k5c\" (UID: \"34d7d362-ebe4-4476-8f52-646e974fb07d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-54k5c" Dec 02 00:08:40 crc kubenswrapper[4856]: W1202 00:08:40.531925 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-d896576378798767cb76bf52802cdc430430de9a6fe27f0c086c920a6fa8e293 WatchSource:0}: Error finding container d896576378798767cb76bf52802cdc430430de9a6fe27f0c086c920a6fa8e293: Status 404 returned error can't find the container with id d896576378798767cb76bf52802cdc430430de9a6fe27f0c086c920a6fa8e293 Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.543272 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szf2h\" (UniqueName: \"kubernetes.io/projected/fa22af34-34f4-4fb8-8512-c5de1c159d63-kube-api-access-szf2h\") pod \"console-operator-58897d9998-s2rvp\" (UID: \"fa22af34-34f4-4fb8-8512-c5de1c159d63\") " pod="openshift-console-operator/console-operator-58897d9998-s2rvp" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.543986 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-rkvl8" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.564532 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hd8qf\" (UniqueName: \"kubernetes.io/projected/93046834-963c-4132-a184-d9541f761870-kube-api-access-hd8qf\") pod \"machine-api-operator-5694c8668f-4ztk7\" (UID: \"93046834-963c-4132-a184-d9541f761870\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4ztk7" Dec 02 00:08:40 crc kubenswrapper[4856]: W1202 00:08:40.572731 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-7777902c434215aed04e67a672f89de65d0e9c6248868cc12eda3306b258dac9 WatchSource:0}: Error finding container 7777902c434215aed04e67a672f89de65d0e9c6248868cc12eda3306b258dac9: Status 404 returned error can't find the container with id 7777902c434215aed04e67a672f89de65d0e9c6248868cc12eda3306b258dac9 Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.573814 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-s2rvp" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.577194 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-54k5c" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.592045 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6t8k\" (UniqueName: \"kubernetes.io/projected/c6f67f96-77c7-473b-ae34-d0b5926ef9fa-kube-api-access-d6t8k\") pod \"openshift-apiserver-operator-796bbdcf4f-b9s78\" (UID: \"c6f67f96-77c7-473b-ae34-d0b5926ef9fa\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b9s78" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.603804 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xw8bp\" (UniqueName: \"kubernetes.io/projected/6cc6ab38-8ed6-468a-864c-25354ae45707-kube-api-access-xw8bp\") pod \"oauth-openshift-558db77b4-7lsm8\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.611871 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-4ztk7" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.630646 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skfcm\" (UniqueName: \"kubernetes.io/projected/ab1f3930-5ec8-49ad-844b-a6166d3ec3fb-kube-api-access-skfcm\") pod \"image-pruner-29410560-9nw7n\" (UID: \"ab1f3930-5ec8-49ad-844b-a6166d3ec3fb\") " pod="openshift-image-registry/image-pruner-29410560-9nw7n" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.643337 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bt8g9\" (UniqueName: \"kubernetes.io/projected/4fe59379-8ff6-4c3b-aa26-7e65a11d405b-kube-api-access-bt8g9\") pod \"route-controller-manager-6576b87f9c-xph6l\" (UID: \"4fe59379-8ff6-4c3b-aa26-7e65a11d405b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.674994 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckjfk\" (UniqueName: \"kubernetes.io/projected/4f7bfbc8-c98c-4600-99b6-ec82ba542c49-kube-api-access-ckjfk\") pod \"machine-config-operator-74547568cd-m8qvw\" (UID: \"4f7bfbc8-c98c-4600-99b6-ec82ba542c49\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m8qvw" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.679971 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.685475 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-px954\" (UniqueName: \"kubernetes.io/projected/f5b4c6d3-261d-4477-aac1-67034bf1a503-kube-api-access-px954\") pod \"kube-storage-version-migrator-operator-b67b599dd-z5psp\" (UID: \"f5b4c6d3-261d-4477-aac1-67034bf1a503\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z5psp" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.706994 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z5psp" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.720407 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88qs8\" (UniqueName: \"kubernetes.io/projected/819ff29c-f7f5-442f-8eb5-e7ccc25d2219-kube-api-access-88qs8\") pod \"router-default-5444994796-5phk8\" (UID: \"819ff29c-f7f5-442f-8eb5-e7ccc25d2219\") " pod="openshift-ingress/router-default-5444994796-5phk8" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.728153 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8p4ct\" (UniqueName: \"kubernetes.io/projected/760efad4-17b6-4a2c-8d36-68a59d2c60be-kube-api-access-8p4ct\") pod \"etcd-operator-b45778765-pdb4m\" (UID: \"760efad4-17b6-4a2c-8d36-68a59d2c60be\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pdb4m" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.731798 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.747035 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8lcb\" (UniqueName: \"kubernetes.io/projected/ffb4b3e1-a0b9-40fc-9b6f-f34549a866b3-kube-api-access-t8lcb\") pod \"machine-config-controller-84d6567774-wlphq\" (UID: \"ffb4b3e1-a0b9-40fc-9b6f-f34549a866b3\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-wlphq" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.763863 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.770145 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-5phk8" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.778773 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/71754574-f7cd-4f47-916a-efd57a69e4ad-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-ftzrj\" (UID: \"71754574-f7cd-4f47-916a-efd57a69e4ad\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ftzrj" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.784190 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6tnk\" (UniqueName: \"kubernetes.io/projected/ecbd556c-e5af-4f34-8351-ef9ff3416abe-kube-api-access-s6tnk\") pod \"multus-admission-controller-857f4d67dd-vstlv\" (UID: \"ecbd556c-e5af-4f34-8351-ef9ff3416abe\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-vstlv" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.784810 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-4mgld"] Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.785055 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m8qvw" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.797310 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chhj4" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.803265 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjzgn\" (UniqueName: \"kubernetes.io/projected/f2c1346e-6e8d-4acb-b329-88bf72eaef2b-kube-api-access-hjzgn\") pod \"migrator-59844c95c7-fwb5c\" (UID: \"f2c1346e-6e8d-4acb-b329-88bf72eaef2b\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fwb5c" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.810666 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b9s78" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.814398 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-rkvl8"] Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.815573 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fwb5c" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.827105 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pg6h\" (UniqueName: \"kubernetes.io/projected/23476e03-4f2c-426e-a180-9ad48a56d758-kube-api-access-5pg6h\") pod \"console-f9d7485db-6zlxx\" (UID: \"23476e03-4f2c-426e-a180-9ad48a56d758\") " pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.848402 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxtgb\" (UniqueName: \"kubernetes.io/projected/9c4c1d11-deb4-4971-bf3e-768b4b30ee6a-kube-api-access-wxtgb\") pod \"dns-operator-744455d44c-49wjq\" (UID: \"9c4c1d11-deb4-4971-bf3e-768b4b30ee6a\") " pod="openshift-dns-operator/dns-operator-744455d44c-49wjq" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.865278 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbbwr\" (UniqueName: \"kubernetes.io/projected/7ecc82d8-17ea-4639-a4cf-2dea25574574-kube-api-access-nbbwr\") pod \"downloads-7954f5f757-89qzj\" (UID: \"7ecc82d8-17ea-4639-a4cf-2dea25574574\") " pod="openshift-console/downloads-7954f5f757-89qzj" Dec 02 00:08:40 crc kubenswrapper[4856]: W1202 00:08:40.878746 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod819ff29c_f7f5_442f_8eb5_e7ccc25d2219.slice/crio-c9b6d1e496a5525df7bcb92ea7d13968be92a4c43ce3598f074984819756b6bf WatchSource:0}: Error finding container c9b6d1e496a5525df7bcb92ea7d13968be92a4c43ce3598f074984819756b6bf: Status 404 returned error can't find the container with id c9b6d1e496a5525df7bcb92ea7d13968be92a4c43ce3598f074984819756b6bf Dec 02 00:08:40 crc kubenswrapper[4856]: W1202 00:08:40.879688 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod11c6790a_0083_45a1_955d_af4fe38ac958.slice/crio-fb238a75637254ac68f093afd53b83cf93bcabc52a174a88b54c0c4c8ae75245 WatchSource:0}: Error finding container fb238a75637254ac68f093afd53b83cf93bcabc52a174a88b54c0c4c8ae75245: Status 404 returned error can't find the container with id fb238a75637254ac68f093afd53b83cf93bcabc52a174a88b54c0c4c8ae75245 Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.880671 4856 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/263d4630-69c0-4f10-a3c6-7f88b7836533-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-t8qsp\" (UID: \"263d4630-69c0-4f10-a3c6-7f88b7836533\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-t8qsp" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.901865 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-pruner-29410560-9nw7n" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.905974 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhxb6\" (UniqueName: \"kubernetes.io/projected/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-kube-api-access-jhxb6\") pod \"controller-manager-879f6c89f-dp8tx\" (UID: \"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.923335 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eda44bb3-b2c2-468b-984d-88809371a6b7-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-zktnf\" (UID: \"eda44bb3-b2c2-468b-984d-88809371a6b7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zktnf" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.927258 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-pdb4m" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.934263 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l"] Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.941663 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.946744 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zktnf" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.949009 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckl2j\" (UniqueName: \"kubernetes.io/projected/44d156b7-bec9-4cf2-8a71-91c94a4db280-kube-api-access-ckl2j\") pod \"ingress-operator-5b745b69d9-6phhd\" (UID: \"44d156b7-bec9-4cf2-8a71-91c94a4db280\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6phhd" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.956467 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-89qzj" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.962567 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-wlphq" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.963269 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lkv4\" (UniqueName: \"kubernetes.io/projected/f4c09d66-5b48-47b0-9696-4380fcc8edf3-kube-api-access-9lkv4\") pod \"catalog-operator-68c6474976-tj4lm\" (UID: \"f4c09d66-5b48-47b0-9696-4380fcc8edf3\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tj4lm" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.981884 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrf2m\" (UniqueName: \"kubernetes.io/projected/0f9c7450-45c7-4c5e-8b34-d128ee553a82-kube-api-access-rrf2m\") pod \"openshift-controller-manager-operator-756b6f6bc6-ct88q\" (UID: \"0f9c7450-45c7-4c5e-8b34-d128ee553a82\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ct88q" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.985139 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ct88q" Dec 02 00:08:40 crc kubenswrapper[4856]: I1202 00:08:40.997275 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ftzrj" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.006750 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvr66\" (UniqueName: \"kubernetes.io/projected/263d4630-69c0-4f10-a3c6-7f88b7836533-kube-api-access-qvr66\") pod \"cluster-image-registry-operator-dc59b4c8b-t8qsp\" (UID: \"263d4630-69c0-4f10-a3c6-7f88b7836533\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-t8qsp" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.008875 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"e57c62d2fc09e5dd503ffecb6192e3469f3ca0b30d800bcc9d675cf2edb4cee3"} Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.008915 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"7777902c434215aed04e67a672f89de65d0e9c6248868cc12eda3306b258dac9"} Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.009270 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.011389 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-4mgld" event={"ID":"11c6790a-0083-45a1-955d-af4fe38ac958","Type":"ContainerStarted","Data":"fb238a75637254ac68f093afd53b83cf93bcabc52a174a88b54c0c4c8ae75245"} Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.018570 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"c94adaeb7f5d7e5bedd51f0b33d11ffa5c615698f3d846dc44faf1d24f8c480c"} Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.018624 4856 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"d896576378798767cb76bf52802cdc430430de9a6fe27f0c086c920a6fa8e293"} Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.028868 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"47b0be2e09502c4f88c08093322a6637b41d11b58ff6c9c98f402bbbf1f7c06f"} Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.028906 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"112b52ae4b0b261f955e532e8192e6b36d9727db64de3c582d663d541e334df9"} Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.030782 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-5phk8" event={"ID":"819ff29c-f7f5-442f-8eb5-e7ccc25d2219","Type":"ContainerStarted","Data":"c9b6d1e496a5525df7bcb92ea7d13968be92a4c43ce3598f074984819756b6bf"} Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.035812 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/44d156b7-bec9-4cf2-8a71-91c94a4db280-bound-sa-token\") pod \"ingress-operator-5b745b69d9-6phhd\" (UID: \"44d156b7-bec9-4cf2-8a71-91c94a4db280\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6phhd" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.036988 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-rkvl8" event={"ID":"cdee0db3-d504-4b9e-81d6-97bb134a3271","Type":"ContainerStarted","Data":"f0d3a3d7468a59718f9fa3aaeb43c2f7a08b8203d2fd8f6c06abd55fdda697cb"} Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.054508 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-vstlv" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.055911 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-s2rvp"] Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.077545 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/eebf27f1-cb8d-4ec3-8982-afb66867cda1-installation-pull-secrets\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.077600 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/eebf27f1-cb8d-4ec3-8982-afb66867cda1-registry-tls\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.077626 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/782abbf8-ce88-46c6-bef2-708bca62dd57-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7s95v\" (UID: \"782abbf8-ce88-46c6-bef2-708bca62dd57\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7s95v" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.077644 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/782abbf8-ce88-46c6-bef2-708bca62dd57-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7s95v\" (UID: \"782abbf8-ce88-46c6-bef2-708bca62dd57\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7s95v" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.077672 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/eebf27f1-cb8d-4ec3-8982-afb66867cda1-trusted-ca\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.077691 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/eebf27f1-cb8d-4ec3-8982-afb66867cda1-registry-certificates\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.077711 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/eebf27f1-cb8d-4ec3-8982-afb66867cda1-bound-sa-token\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.077733 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/782abbf8-ce88-46c6-bef2-708bca62dd57-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7s95v\" (UID: \"782abbf8-ce88-46c6-bef2-708bca62dd57\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7s95v" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.077756 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.077774 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/eebf27f1-cb8d-4ec3-8982-afb66867cda1-ca-trust-extracted\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.077789 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btz9p\" (UniqueName: \"kubernetes.io/projected/eebf27f1-cb8d-4ec3-8982-afb66867cda1-kube-api-access-btz9p\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: E1202 00:08:41.078136 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:41.57812412 +0000 UTC m=+148.604492124 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.078450 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-49wjq" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.078789 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-54k5c"] Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.079787 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-4ztk7"] Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.104919 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.109800 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tj4lm" Dec 02 00:08:41 crc kubenswrapper[4856]: W1202 00:08:41.145337 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod34d7d362_ebe4_4476_8f52_646e974fb07d.slice/crio-49a198b8b09bbf72e279cfe13ef02ee1ed493d3f541884b8c10ab00a82db1aec WatchSource:0}: Error finding container 49a198b8b09bbf72e279cfe13ef02ee1ed493d3f541884b8c10ab00a82db1aec: Status 404 returned error can't find the container with id 49a198b8b09bbf72e279cfe13ef02ee1ed493d3f541884b8c10ab00a82db1aec Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.179895 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.180129 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/eebf27f1-cb8d-4ec3-8982-afb66867cda1-registry-certificates\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.180157 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ksb4r\" (UniqueName: \"kubernetes.io/projected/7accf6cf-1c4a-40d7-80d9-5b4f92a46d53-kube-api-access-ksb4r\") pod \"collect-profiles-29410560-wj24r\" (UID: \"7accf6cf-1c4a-40d7-80d9-5b4f92a46d53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410560-wj24r" Dec 02 00:08:41 crc kubenswrapper[4856]: E1202 00:08:41.180224 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:41.680201704 +0000 UTC m=+148.706569708 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.181499 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/eebf27f1-cb8d-4ec3-8982-afb66867cda1-registry-certificates\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.181644 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcb7j\" (UniqueName: \"kubernetes.io/projected/7530965f-940a-4ac6-8dc6-be67d35a2f08-kube-api-access-wcb7j\") pod \"marketplace-operator-79b997595-dllbd\" (UID: \"7530965f-940a-4ac6-8dc6-be67d35a2f08\") " pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.181715 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/eebf27f1-cb8d-4ec3-8982-afb66867cda1-bound-sa-token\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.181736 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9wc7\" (UniqueName: \"kubernetes.io/projected/5fc26b3d-7f39-4821-a9f6-eafe39f3c335-kube-api-access-q9wc7\") pod \"packageserver-d55dfcdfc-2vc5h\" (UID: \"5fc26b3d-7f39-4821-a9f6-eafe39f3c335\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2vc5h" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.181776 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/a753f707-5591-486d-b36c-217cdf0ae9df-socket-dir\") pod \"csi-hostpathplugin-xls64\" (UID: \"a753f707-5591-486d-b36c-217cdf0ae9df\") " pod="hostpath-provisioner/csi-hostpathplugin-xls64" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.182301 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: E1202 00:08:41.191022 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:41.691005249 +0000 UTC m=+148.717373253 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.182915 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/782abbf8-ce88-46c6-bef2-708bca62dd57-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7s95v\" (UID: \"782abbf8-ce88-46c6-bef2-708bca62dd57\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7s95v" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.192773 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/eebf27f1-cb8d-4ec3-8982-afb66867cda1-ca-trust-extracted\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.192803 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btz9p\" (UniqueName: \"kubernetes.io/projected/eebf27f1-cb8d-4ec3-8982-afb66867cda1-kube-api-access-btz9p\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.192868 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hs9xc\" (UniqueName: \"kubernetes.io/projected/a753f707-5591-486d-b36c-217cdf0ae9df-kube-api-access-hs9xc\") pod \"csi-hostpathplugin-xls64\" (UID: \"a753f707-5591-486d-b36c-217cdf0ae9df\") " pod="hostpath-provisioner/csi-hostpathplugin-xls64" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.192912 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jptgc\" (UniqueName: \"kubernetes.io/projected/5b7f5674-184e-48f4-b455-239076effb38-kube-api-access-jptgc\") pod \"machine-config-server-s5rmh\" (UID: \"5b7f5674-184e-48f4-b455-239076effb38\") " pod="openshift-machine-config-operator/machine-config-server-s5rmh" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.193014 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3c80eeea-e581-44bc-b901-10fb429f27e3-cert\") pod \"ingress-canary-4r66c\" (UID: \"3c80eeea-e581-44bc-b901-10fb429f27e3\") " pod="openshift-ingress-canary/ingress-canary-4r66c" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.193087 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wt6d9\" (UniqueName: \"kubernetes.io/projected/8247592b-2d26-494b-a8ed-d4c1e052f7a6-kube-api-access-wt6d9\") pod \"package-server-manager-789f6589d5-2vztf\" (UID: \"8247592b-2d26-494b-a8ed-d4c1e052f7a6\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2vztf" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.193188 4856 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/eebf27f1-cb8d-4ec3-8982-afb66867cda1-ca-trust-extracted\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.194341 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/5c276957-366f-454e-bd11-de451d27e0be-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-q58fj\" (UID: \"5c276957-366f-454e-bd11-de451d27e0be\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-q58fj" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.194770 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa-config\") pod \"machine-approver-56656f9798-49zfd\" (UID: \"c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-49zfd" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.194836 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7accf6cf-1c4a-40d7-80d9-5b4f92a46d53-config-volume\") pod \"collect-profiles-29410560-wj24r\" (UID: \"7accf6cf-1c4a-40d7-80d9-5b4f92a46d53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410560-wj24r" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.195315 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5fc26b3d-7f39-4821-a9f6-eafe39f3c335-webhook-cert\") pod \"packageserver-d55dfcdfc-2vc5h\" (UID: \"5fc26b3d-7f39-4821-a9f6-eafe39f3c335\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2vc5h" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.197032 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/782abbf8-ce88-46c6-bef2-708bca62dd57-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7s95v\" (UID: \"782abbf8-ce88-46c6-bef2-708bca62dd57\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7s95v" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.198408 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/08442f50-cb64-4c0a-a8c6-ecd34ad7aa38-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hzp47\" (UID: \"08442f50-cb64-4c0a-a8c6-ecd34ad7aa38\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzp47" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.199325 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/eebf27f1-cb8d-4ec3-8982-afb66867cda1-installation-pull-secrets\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.199991 4856 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5b7f5674-184e-48f4-b455-239076effb38-certs\") pod \"machine-config-server-s5rmh\" (UID: \"5b7f5674-184e-48f4-b455-239076effb38\") " pod="openshift-machine-config-operator/machine-config-server-s5rmh" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.200478 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7530965f-940a-4ac6-8dc6-be67d35a2f08-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-dllbd\" (UID: \"7530965f-940a-4ac6-8dc6-be67d35a2f08\") " pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.201780 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62tcd\" (UniqueName: \"kubernetes.io/projected/3c80eeea-e581-44bc-b901-10fb429f27e3-kube-api-access-62tcd\") pod \"ingress-canary-4r66c\" (UID: \"3c80eeea-e581-44bc-b901-10fb429f27e3\") " pod="openshift-ingress-canary/ingress-canary-4r66c" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.201901 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/a753f707-5591-486d-b36c-217cdf0ae9df-csi-data-dir\") pod \"csi-hostpathplugin-xls64\" (UID: \"a753f707-5591-486d-b36c-217cdf0ae9df\") " pod="hostpath-provisioner/csi-hostpathplugin-xls64" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.202304 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/eebf27f1-cb8d-4ec3-8982-afb66867cda1-registry-tls\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.204923 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5fc26b3d-7f39-4821-a9f6-eafe39f3c335-apiservice-cert\") pod \"packageserver-d55dfcdfc-2vc5h\" (UID: \"5fc26b3d-7f39-4821-a9f6-eafe39f3c335\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2vc5h" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.207784 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/eebf27f1-cb8d-4ec3-8982-afb66867cda1-registry-tls\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.208877 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9frcq\" (UniqueName: \"kubernetes.io/projected/08442f50-cb64-4c0a-a8c6-ecd34ad7aa38-kube-api-access-9frcq\") pod \"olm-operator-6b444d44fb-hzp47\" (UID: \"08442f50-cb64-4c0a-a8c6-ecd34ad7aa38\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzp47" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.209055 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/782abbf8-ce88-46c6-bef2-708bca62dd57-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7s95v\" (UID: \"782abbf8-ce88-46c6-bef2-708bca62dd57\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7s95v" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.212677 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/1471be81-ab52-4a5c-b350-115301184a17-signing-cabundle\") pod \"service-ca-9c57cc56f-rlmv8\" (UID: \"1471be81-ab52-4a5c-b350-115301184a17\") " pod="openshift-service-ca/service-ca-9c57cc56f-rlmv8" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.213001 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/a753f707-5591-486d-b36c-217cdf0ae9df-registration-dir\") pod \"csi-hostpathplugin-xls64\" (UID: \"a753f707-5591-486d-b36c-217cdf0ae9df\") " pod="hostpath-provisioner/csi-hostpathplugin-xls64" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.213268 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/782abbf8-ce88-46c6-bef2-708bca62dd57-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7s95v\" (UID: \"782abbf8-ce88-46c6-bef2-708bca62dd57\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7s95v" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.213923 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96c76858-8564-4349-9f31-936cf700a541-serving-cert\") pod \"service-ca-operator-777779d784-z6j5r\" (UID: \"96c76858-8564-4349-9f31-936cf700a541\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z6j5r" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.214018 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/1471be81-ab52-4a5c-b350-115301184a17-signing-key\") pod \"service-ca-9c57cc56f-rlmv8\" (UID: \"1471be81-ab52-4a5c-b350-115301184a17\") " pod="openshift-service-ca/service-ca-9c57cc56f-rlmv8" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.214267 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cc0b2623-e7d2-4497-b688-977a23f8d922-config-volume\") pod \"dns-default-sdthq\" (UID: \"cc0b2623-e7d2-4497-b688-977a23f8d922\") " pod="openshift-dns/dns-default-sdthq" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.214362 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa-machine-approver-tls\") pod \"machine-approver-56656f9798-49zfd\" (UID: \"c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-49zfd" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.214435 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/08442f50-cb64-4c0a-a8c6-ecd34ad7aa38-srv-cert\") pod \"olm-operator-6b444d44fb-hzp47\" (UID: 
\"08442f50-cb64-4c0a-a8c6-ecd34ad7aa38\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzp47" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.211095 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/782abbf8-ce88-46c6-bef2-708bca62dd57-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7s95v\" (UID: \"782abbf8-ce88-46c6-bef2-708bca62dd57\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7s95v" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.215353 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/8247592b-2d26-494b-a8ed-d4c1e052f7a6-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-2vztf\" (UID: \"8247592b-2d26-494b-a8ed-d4c1e052f7a6\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2vztf" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.215796 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/a753f707-5591-486d-b36c-217cdf0ae9df-plugins-dir\") pod \"csi-hostpathplugin-xls64\" (UID: \"a753f707-5591-486d-b36c-217cdf0ae9df\") " pod="hostpath-provisioner/csi-hostpathplugin-xls64" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.216518 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5b7f5674-184e-48f4-b455-239076effb38-node-bootstrap-token\") pod \"machine-config-server-s5rmh\" (UID: \"5b7f5674-184e-48f4-b455-239076effb38\") " pod="openshift-machine-config-operator/machine-config-server-s5rmh" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.216664 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7qxr\" (UniqueName: \"kubernetes.io/projected/1471be81-ab52-4a5c-b350-115301184a17-kube-api-access-b7qxr\") pod \"service-ca-9c57cc56f-rlmv8\" (UID: \"1471be81-ab52-4a5c-b350-115301184a17\") " pod="openshift-service-ca/service-ca-9c57cc56f-rlmv8" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.216768 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7530965f-940a-4ac6-8dc6-be67d35a2f08-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-dllbd\" (UID: \"7530965f-940a-4ac6-8dc6-be67d35a2f08\") " pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.217095 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7accf6cf-1c4a-40d7-80d9-5b4f92a46d53-secret-volume\") pod \"collect-profiles-29410560-wj24r\" (UID: \"7accf6cf-1c4a-40d7-80d9-5b4f92a46d53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410560-wj24r" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.215615 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/eebf27f1-cb8d-4ec3-8982-afb66867cda1-installation-pull-secrets\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: 
\"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.218253 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/5fc26b3d-7f39-4821-a9f6-eafe39f3c335-tmpfs\") pod \"packageserver-d55dfcdfc-2vc5h\" (UID: \"5fc26b3d-7f39-4821-a9f6-eafe39f3c335\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2vc5h" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.220043 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa-auth-proxy-config\") pod \"machine-approver-56656f9798-49zfd\" (UID: \"c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-49zfd" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.220298 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96c76858-8564-4349-9f31-936cf700a541-config\") pod \"service-ca-operator-777779d784-z6j5r\" (UID: \"96c76858-8564-4349-9f31-936cf700a541\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z6j5r" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.222161 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88mf7\" (UniqueName: \"kubernetes.io/projected/c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa-kube-api-access-88mf7\") pod \"machine-approver-56656f9798-49zfd\" (UID: \"c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-49zfd" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.222247 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8r9lk\" (UniqueName: \"kubernetes.io/projected/96c76858-8564-4349-9f31-936cf700a541-kube-api-access-8r9lk\") pod \"service-ca-operator-777779d784-z6j5r\" (UID: \"96c76858-8564-4349-9f31-936cf700a541\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z6j5r" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.222434 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/cc0b2623-e7d2-4497-b688-977a23f8d922-metrics-tls\") pod \"dns-default-sdthq\" (UID: \"cc0b2623-e7d2-4497-b688-977a23f8d922\") " pod="openshift-dns/dns-default-sdthq" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.222661 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/eebf27f1-cb8d-4ec3-8982-afb66867cda1-trusted-ca\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.223027 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfljn\" (UniqueName: \"kubernetes.io/projected/5c276957-366f-454e-bd11-de451d27e0be-kube-api-access-jfljn\") pod \"control-plane-machine-set-operator-78cbb6b69f-q58fj\" (UID: \"5c276957-366f-454e-bd11-de451d27e0be\") " 
pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-q58fj" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.223119 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gq5pf\" (UniqueName: \"kubernetes.io/projected/cc0b2623-e7d2-4497-b688-977a23f8d922-kube-api-access-gq5pf\") pod \"dns-default-sdthq\" (UID: \"cc0b2623-e7d2-4497-b688-977a23f8d922\") " pod="openshift-dns/dns-default-sdthq" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.223295 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/a753f707-5591-486d-b36c-217cdf0ae9df-mountpoint-dir\") pod \"csi-hostpathplugin-xls64\" (UID: \"a753f707-5591-486d-b36c-217cdf0ae9df\") " pod="hostpath-provisioner/csi-hostpathplugin-xls64" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.224437 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/eebf27f1-cb8d-4ec3-8982-afb66867cda1-trusted-ca\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.234831 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-t8qsp" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.261172 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/eebf27f1-cb8d-4ec3-8982-afb66867cda1-bound-sa-token\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.261986 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btz9p\" (UniqueName: \"kubernetes.io/projected/eebf27f1-cb8d-4ec3-8982-afb66867cda1-kube-api-access-btz9p\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.268237 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/782abbf8-ce88-46c6-bef2-708bca62dd57-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-7s95v\" (UID: \"782abbf8-ce88-46c6-bef2-708bca62dd57\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7s95v" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.272310 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6phhd" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.323776 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z5psp"] Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.323811 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7lsm8"] Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.328412 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:41 crc kubenswrapper[4856]: E1202 00:08:41.328839 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:41.82880887 +0000 UTC m=+148.855176874 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.328951 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.328994 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hs9xc\" (UniqueName: \"kubernetes.io/projected/a753f707-5591-486d-b36c-217cdf0ae9df-kube-api-access-hs9xc\") pod \"csi-hostpathplugin-xls64\" (UID: \"a753f707-5591-486d-b36c-217cdf0ae9df\") " pod="hostpath-provisioner/csi-hostpathplugin-xls64" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329022 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jptgc\" (UniqueName: \"kubernetes.io/projected/5b7f5674-184e-48f4-b455-239076effb38-kube-api-access-jptgc\") pod \"machine-config-server-s5rmh\" (UID: \"5b7f5674-184e-48f4-b455-239076effb38\") " pod="openshift-machine-config-operator/machine-config-server-s5rmh" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329045 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3c80eeea-e581-44bc-b901-10fb429f27e3-cert\") pod \"ingress-canary-4r66c\" (UID: \"3c80eeea-e581-44bc-b901-10fb429f27e3\") " pod="openshift-ingress-canary/ingress-canary-4r66c" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329072 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-wt6d9\" (UniqueName: \"kubernetes.io/projected/8247592b-2d26-494b-a8ed-d4c1e052f7a6-kube-api-access-wt6d9\") pod \"package-server-manager-789f6589d5-2vztf\" (UID: \"8247592b-2d26-494b-a8ed-d4c1e052f7a6\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2vztf" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329109 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa-config\") pod \"machine-approver-56656f9798-49zfd\" (UID: \"c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-49zfd" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329133 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7accf6cf-1c4a-40d7-80d9-5b4f92a46d53-config-volume\") pod \"collect-profiles-29410560-wj24r\" (UID: \"7accf6cf-1c4a-40d7-80d9-5b4f92a46d53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410560-wj24r" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329163 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/5c276957-366f-454e-bd11-de451d27e0be-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-q58fj\" (UID: \"5c276957-366f-454e-bd11-de451d27e0be\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-q58fj" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329188 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5fc26b3d-7f39-4821-a9f6-eafe39f3c335-webhook-cert\") pod \"packageserver-d55dfcdfc-2vc5h\" (UID: \"5fc26b3d-7f39-4821-a9f6-eafe39f3c335\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2vc5h" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329215 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/08442f50-cb64-4c0a-a8c6-ecd34ad7aa38-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hzp47\" (UID: \"08442f50-cb64-4c0a-a8c6-ecd34ad7aa38\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzp47" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329233 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5b7f5674-184e-48f4-b455-239076effb38-certs\") pod \"machine-config-server-s5rmh\" (UID: \"5b7f5674-184e-48f4-b455-239076effb38\") " pod="openshift-machine-config-operator/machine-config-server-s5rmh" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329256 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7530965f-940a-4ac6-8dc6-be67d35a2f08-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-dllbd\" (UID: \"7530965f-940a-4ac6-8dc6-be67d35a2f08\") " pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329274 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62tcd\" (UniqueName: 
\"kubernetes.io/projected/3c80eeea-e581-44bc-b901-10fb429f27e3-kube-api-access-62tcd\") pod \"ingress-canary-4r66c\" (UID: \"3c80eeea-e581-44bc-b901-10fb429f27e3\") " pod="openshift-ingress-canary/ingress-canary-4r66c" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329289 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/a753f707-5591-486d-b36c-217cdf0ae9df-csi-data-dir\") pod \"csi-hostpathplugin-xls64\" (UID: \"a753f707-5591-486d-b36c-217cdf0ae9df\") " pod="hostpath-provisioner/csi-hostpathplugin-xls64" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329308 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5fc26b3d-7f39-4821-a9f6-eafe39f3c335-apiservice-cert\") pod \"packageserver-d55dfcdfc-2vc5h\" (UID: \"5fc26b3d-7f39-4821-a9f6-eafe39f3c335\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2vc5h" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329325 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9frcq\" (UniqueName: \"kubernetes.io/projected/08442f50-cb64-4c0a-a8c6-ecd34ad7aa38-kube-api-access-9frcq\") pod \"olm-operator-6b444d44fb-hzp47\" (UID: \"08442f50-cb64-4c0a-a8c6-ecd34ad7aa38\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzp47" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329349 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/1471be81-ab52-4a5c-b350-115301184a17-signing-cabundle\") pod \"service-ca-9c57cc56f-rlmv8\" (UID: \"1471be81-ab52-4a5c-b350-115301184a17\") " pod="openshift-service-ca/service-ca-9c57cc56f-rlmv8" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329376 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/a753f707-5591-486d-b36c-217cdf0ae9df-registration-dir\") pod \"csi-hostpathplugin-xls64\" (UID: \"a753f707-5591-486d-b36c-217cdf0ae9df\") " pod="hostpath-provisioner/csi-hostpathplugin-xls64" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329398 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/1471be81-ab52-4a5c-b350-115301184a17-signing-key\") pod \"service-ca-9c57cc56f-rlmv8\" (UID: \"1471be81-ab52-4a5c-b350-115301184a17\") " pod="openshift-service-ca/service-ca-9c57cc56f-rlmv8" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329423 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96c76858-8564-4349-9f31-936cf700a541-serving-cert\") pod \"service-ca-operator-777779d784-z6j5r\" (UID: \"96c76858-8564-4349-9f31-936cf700a541\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z6j5r" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329448 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cc0b2623-e7d2-4497-b688-977a23f8d922-config-volume\") pod \"dns-default-sdthq\" (UID: \"cc0b2623-e7d2-4497-b688-977a23f8d922\") " pod="openshift-dns/dns-default-sdthq" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329506 4856 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa-machine-approver-tls\") pod \"machine-approver-56656f9798-49zfd\" (UID: \"c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-49zfd" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329531 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/08442f50-cb64-4c0a-a8c6-ecd34ad7aa38-srv-cert\") pod \"olm-operator-6b444d44fb-hzp47\" (UID: \"08442f50-cb64-4c0a-a8c6-ecd34ad7aa38\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzp47" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329556 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/8247592b-2d26-494b-a8ed-d4c1e052f7a6-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-2vztf\" (UID: \"8247592b-2d26-494b-a8ed-d4c1e052f7a6\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2vztf" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329574 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/a753f707-5591-486d-b36c-217cdf0ae9df-plugins-dir\") pod \"csi-hostpathplugin-xls64\" (UID: \"a753f707-5591-486d-b36c-217cdf0ae9df\") " pod="hostpath-provisioner/csi-hostpathplugin-xls64" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329610 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5b7f5674-184e-48f4-b455-239076effb38-node-bootstrap-token\") pod \"machine-config-server-s5rmh\" (UID: \"5b7f5674-184e-48f4-b455-239076effb38\") " pod="openshift-machine-config-operator/machine-config-server-s5rmh" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329626 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7qxr\" (UniqueName: \"kubernetes.io/projected/1471be81-ab52-4a5c-b350-115301184a17-kube-api-access-b7qxr\") pod \"service-ca-9c57cc56f-rlmv8\" (UID: \"1471be81-ab52-4a5c-b350-115301184a17\") " pod="openshift-service-ca/service-ca-9c57cc56f-rlmv8" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329644 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7530965f-940a-4ac6-8dc6-be67d35a2f08-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-dllbd\" (UID: \"7530965f-940a-4ac6-8dc6-be67d35a2f08\") " pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329667 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7accf6cf-1c4a-40d7-80d9-5b4f92a46d53-secret-volume\") pod \"collect-profiles-29410560-wj24r\" (UID: \"7accf6cf-1c4a-40d7-80d9-5b4f92a46d53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410560-wj24r" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329692 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/5fc26b3d-7f39-4821-a9f6-eafe39f3c335-tmpfs\") pod \"packageserver-d55dfcdfc-2vc5h\" (UID: 
\"5fc26b3d-7f39-4821-a9f6-eafe39f3c335\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2vc5h" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329718 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa-auth-proxy-config\") pod \"machine-approver-56656f9798-49zfd\" (UID: \"c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-49zfd" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329734 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96c76858-8564-4349-9f31-936cf700a541-config\") pod \"service-ca-operator-777779d784-z6j5r\" (UID: \"96c76858-8564-4349-9f31-936cf700a541\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z6j5r" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329754 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88mf7\" (UniqueName: \"kubernetes.io/projected/c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa-kube-api-access-88mf7\") pod \"machine-approver-56656f9798-49zfd\" (UID: \"c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-49zfd" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329775 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8r9lk\" (UniqueName: \"kubernetes.io/projected/96c76858-8564-4349-9f31-936cf700a541-kube-api-access-8r9lk\") pod \"service-ca-operator-777779d784-z6j5r\" (UID: \"96c76858-8564-4349-9f31-936cf700a541\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z6j5r" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329790 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/cc0b2623-e7d2-4497-b688-977a23f8d922-metrics-tls\") pod \"dns-default-sdthq\" (UID: \"cc0b2623-e7d2-4497-b688-977a23f8d922\") " pod="openshift-dns/dns-default-sdthq" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329815 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfljn\" (UniqueName: \"kubernetes.io/projected/5c276957-366f-454e-bd11-de451d27e0be-kube-api-access-jfljn\") pod \"control-plane-machine-set-operator-78cbb6b69f-q58fj\" (UID: \"5c276957-366f-454e-bd11-de451d27e0be\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-q58fj" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329857 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gq5pf\" (UniqueName: \"kubernetes.io/projected/cc0b2623-e7d2-4497-b688-977a23f8d922-kube-api-access-gq5pf\") pod \"dns-default-sdthq\" (UID: \"cc0b2623-e7d2-4497-b688-977a23f8d922\") " pod="openshift-dns/dns-default-sdthq" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329880 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/a753f707-5591-486d-b36c-217cdf0ae9df-mountpoint-dir\") pod \"csi-hostpathplugin-xls64\" (UID: \"a753f707-5591-486d-b36c-217cdf0ae9df\") " pod="hostpath-provisioner/csi-hostpathplugin-xls64" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329906 4856 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-ksb4r\" (UniqueName: \"kubernetes.io/projected/7accf6cf-1c4a-40d7-80d9-5b4f92a46d53-kube-api-access-ksb4r\") pod \"collect-profiles-29410560-wj24r\" (UID: \"7accf6cf-1c4a-40d7-80d9-5b4f92a46d53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410560-wj24r" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329932 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcb7j\" (UniqueName: \"kubernetes.io/projected/7530965f-940a-4ac6-8dc6-be67d35a2f08-kube-api-access-wcb7j\") pod \"marketplace-operator-79b997595-dllbd\" (UID: \"7530965f-940a-4ac6-8dc6-be67d35a2f08\") " pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329952 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9wc7\" (UniqueName: \"kubernetes.io/projected/5fc26b3d-7f39-4821-a9f6-eafe39f3c335-kube-api-access-q9wc7\") pod \"packageserver-d55dfcdfc-2vc5h\" (UID: \"5fc26b3d-7f39-4821-a9f6-eafe39f3c335\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2vc5h" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.329972 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/a753f707-5591-486d-b36c-217cdf0ae9df-socket-dir\") pod \"csi-hostpathplugin-xls64\" (UID: \"a753f707-5591-486d-b36c-217cdf0ae9df\") " pod="hostpath-provisioner/csi-hostpathplugin-xls64" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.330404 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/a753f707-5591-486d-b36c-217cdf0ae9df-socket-dir\") pod \"csi-hostpathplugin-xls64\" (UID: \"a753f707-5591-486d-b36c-217cdf0ae9df\") " pod="hostpath-provisioner/csi-hostpathplugin-xls64" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.338762 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5b7f5674-184e-48f4-b455-239076effb38-node-bootstrap-token\") pod \"machine-config-server-s5rmh\" (UID: \"5b7f5674-184e-48f4-b455-239076effb38\") " pod="openshift-machine-config-operator/machine-config-server-s5rmh" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.342550 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5fc26b3d-7f39-4821-a9f6-eafe39f3c335-webhook-cert\") pod \"packageserver-d55dfcdfc-2vc5h\" (UID: \"5fc26b3d-7f39-4821-a9f6-eafe39f3c335\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2vc5h" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.343094 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3c80eeea-e581-44bc-b901-10fb429f27e3-cert\") pod \"ingress-canary-4r66c\" (UID: \"3c80eeea-e581-44bc-b901-10fb429f27e3\") " pod="openshift-ingress-canary/ingress-canary-4r66c" Dec 02 00:08:41 crc kubenswrapper[4856]: E1202 00:08:41.343422 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:41.843406041 +0000 UTC m=+148.869774045 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.343945 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa-config\") pod \"machine-approver-56656f9798-49zfd\" (UID: \"c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-49zfd" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.344313 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5b7f5674-184e-48f4-b455-239076effb38-certs\") pod \"machine-config-server-s5rmh\" (UID: \"5b7f5674-184e-48f4-b455-239076effb38\") " pod="openshift-machine-config-operator/machine-config-server-s5rmh" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.345520 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7530965f-940a-4ac6-8dc6-be67d35a2f08-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-dllbd\" (UID: \"7530965f-940a-4ac6-8dc6-be67d35a2f08\") " pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.348174 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7accf6cf-1c4a-40d7-80d9-5b4f92a46d53-config-volume\") pod \"collect-profiles-29410560-wj24r\" (UID: \"7accf6cf-1c4a-40d7-80d9-5b4f92a46d53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410560-wj24r" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.352438 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa-auth-proxy-config\") pod \"machine-approver-56656f9798-49zfd\" (UID: \"c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-49zfd" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.352853 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/5fc26b3d-7f39-4821-a9f6-eafe39f3c335-tmpfs\") pod \"packageserver-d55dfcdfc-2vc5h\" (UID: \"5fc26b3d-7f39-4821-a9f6-eafe39f3c335\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2vc5h" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.353007 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7s95v" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.353291 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/5c276957-366f-454e-bd11-de451d27e0be-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-q58fj\" (UID: \"5c276957-366f-454e-bd11-de451d27e0be\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-q58fj" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.353767 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/a753f707-5591-486d-b36c-217cdf0ae9df-mountpoint-dir\") pod \"csi-hostpathplugin-xls64\" (UID: \"a753f707-5591-486d-b36c-217cdf0ae9df\") " pod="hostpath-provisioner/csi-hostpathplugin-xls64" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.353716 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/a753f707-5591-486d-b36c-217cdf0ae9df-csi-data-dir\") pod \"csi-hostpathplugin-xls64\" (UID: \"a753f707-5591-486d-b36c-217cdf0ae9df\") " pod="hostpath-provisioner/csi-hostpathplugin-xls64" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.354453 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cc0b2623-e7d2-4497-b688-977a23f8d922-config-volume\") pod \"dns-default-sdthq\" (UID: \"cc0b2623-e7d2-4497-b688-977a23f8d922\") " pod="openshift-dns/dns-default-sdthq" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.354514 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/a753f707-5591-486d-b36c-217cdf0ae9df-plugins-dir\") pod \"csi-hostpathplugin-xls64\" (UID: \"a753f707-5591-486d-b36c-217cdf0ae9df\") " pod="hostpath-provisioner/csi-hostpathplugin-xls64" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.354780 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96c76858-8564-4349-9f31-936cf700a541-config\") pod \"service-ca-operator-777779d784-z6j5r\" (UID: \"96c76858-8564-4349-9f31-936cf700a541\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z6j5r" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.354863 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/a753f707-5591-486d-b36c-217cdf0ae9df-registration-dir\") pod \"csi-hostpathplugin-xls64\" (UID: \"a753f707-5591-486d-b36c-217cdf0ae9df\") " pod="hostpath-provisioner/csi-hostpathplugin-xls64" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.355318 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/1471be81-ab52-4a5c-b350-115301184a17-signing-cabundle\") pod \"service-ca-9c57cc56f-rlmv8\" (UID: \"1471be81-ab52-4a5c-b350-115301184a17\") " pod="openshift-service-ca/service-ca-9c57cc56f-rlmv8" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.358697 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7accf6cf-1c4a-40d7-80d9-5b4f92a46d53-secret-volume\") pod 
\"collect-profiles-29410560-wj24r\" (UID: \"7accf6cf-1c4a-40d7-80d9-5b4f92a46d53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410560-wj24r" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.359659 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7530965f-940a-4ac6-8dc6-be67d35a2f08-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-dllbd\" (UID: \"7530965f-940a-4ac6-8dc6-be67d35a2f08\") " pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.360223 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/8247592b-2d26-494b-a8ed-d4c1e052f7a6-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-2vztf\" (UID: \"8247592b-2d26-494b-a8ed-d4c1e052f7a6\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2vztf" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.362105 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/cc0b2623-e7d2-4497-b688-977a23f8d922-metrics-tls\") pod \"dns-default-sdthq\" (UID: \"cc0b2623-e7d2-4497-b688-977a23f8d922\") " pod="openshift-dns/dns-default-sdthq" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.363617 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5fc26b3d-7f39-4821-a9f6-eafe39f3c335-apiservice-cert\") pod \"packageserver-d55dfcdfc-2vc5h\" (UID: \"5fc26b3d-7f39-4821-a9f6-eafe39f3c335\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2vc5h" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.364475 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wt6d9\" (UniqueName: \"kubernetes.io/projected/8247592b-2d26-494b-a8ed-d4c1e052f7a6-kube-api-access-wt6d9\") pod \"package-server-manager-789f6589d5-2vztf\" (UID: \"8247592b-2d26-494b-a8ed-d4c1e052f7a6\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2vztf" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.364557 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96c76858-8564-4349-9f31-936cf700a541-serving-cert\") pod \"service-ca-operator-777779d784-z6j5r\" (UID: \"96c76858-8564-4349-9f31-936cf700a541\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z6j5r" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.368869 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/1471be81-ab52-4a5c-b350-115301184a17-signing-key\") pod \"service-ca-9c57cc56f-rlmv8\" (UID: \"1471be81-ab52-4a5c-b350-115301184a17\") " pod="openshift-service-ca/service-ca-9c57cc56f-rlmv8" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.369281 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/08442f50-cb64-4c0a-a8c6-ecd34ad7aa38-srv-cert\") pod \"olm-operator-6b444d44fb-hzp47\" (UID: \"08442f50-cb64-4c0a-a8c6-ecd34ad7aa38\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzp47" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.370872 4856 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa-machine-approver-tls\") pod \"machine-approver-56656f9798-49zfd\" (UID: \"c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-49zfd" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.374060 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/08442f50-cb64-4c0a-a8c6-ecd34ad7aa38-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hzp47\" (UID: \"08442f50-cb64-4c0a-a8c6-ecd34ad7aa38\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzp47" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.390276 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hs9xc\" (UniqueName: \"kubernetes.io/projected/a753f707-5591-486d-b36c-217cdf0ae9df-kube-api-access-hs9xc\") pod \"csi-hostpathplugin-xls64\" (UID: \"a753f707-5591-486d-b36c-217cdf0ae9df\") " pod="hostpath-provisioner/csi-hostpathplugin-xls64" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.405715 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jptgc\" (UniqueName: \"kubernetes.io/projected/5b7f5674-184e-48f4-b455-239076effb38-kube-api-access-jptgc\") pod \"machine-config-server-s5rmh\" (UID: \"5b7f5674-184e-48f4-b455-239076effb38\") " pod="openshift-machine-config-operator/machine-config-server-s5rmh" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.424397 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2vztf" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.432324 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.433991 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8r9lk\" (UniqueName: \"kubernetes.io/projected/96c76858-8564-4349-9f31-936cf700a541-kube-api-access-8r9lk\") pod \"service-ca-operator-777779d784-z6j5r\" (UID: \"96c76858-8564-4349-9f31-936cf700a541\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z6j5r" Dec 02 00:08:41 crc kubenswrapper[4856]: E1202 00:08:41.436019 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:41.932816363 +0000 UTC m=+148.959184367 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.458150 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7qxr\" (UniqueName: \"kubernetes.io/projected/1471be81-ab52-4a5c-b350-115301184a17-kube-api-access-b7qxr\") pod \"service-ca-9c57cc56f-rlmv8\" (UID: \"1471be81-ab52-4a5c-b350-115301184a17\") " pod="openshift-service-ca/service-ca-9c57cc56f-rlmv8" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.473033 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr"] Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.494750 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9wc7\" (UniqueName: \"kubernetes.io/projected/5fc26b3d-7f39-4821-a9f6-eafe39f3c335-kube-api-access-q9wc7\") pod \"packageserver-d55dfcdfc-2vc5h\" (UID: \"5fc26b3d-7f39-4821-a9f6-eafe39f3c335\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2vc5h" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.497996 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-m8qvw"] Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.510225 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-z6j5r" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.519898 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chhj4"] Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.519997 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-rlmv8" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.523340 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62tcd\" (UniqueName: \"kubernetes.io/projected/3c80eeea-e581-44bc-b901-10fb429f27e3-kube-api-access-62tcd\") pod \"ingress-canary-4r66c\" (UID: \"3c80eeea-e581-44bc-b901-10fb429f27e3\") " pod="openshift-ingress-canary/ingress-canary-4r66c" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.531760 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gq5pf\" (UniqueName: \"kubernetes.io/projected/cc0b2623-e7d2-4497-b688-977a23f8d922-kube-api-access-gq5pf\") pod \"dns-default-sdthq\" (UID: \"cc0b2623-e7d2-4497-b688-977a23f8d922\") " pod="openshift-dns/dns-default-sdthq" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.535004 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.535359 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-sdthq" Dec 02 00:08:41 crc kubenswrapper[4856]: E1202 00:08:41.535550 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:42.035520873 +0000 UTC m=+149.061888867 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.543760 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-s5rmh" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.552248 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-4r66c" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.561406 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9frcq\" (UniqueName: \"kubernetes.io/projected/08442f50-cb64-4c0a-a8c6-ecd34ad7aa38-kube-api-access-9frcq\") pod \"olm-operator-6b444d44fb-hzp47\" (UID: \"08442f50-cb64-4c0a-a8c6-ecd34ad7aa38\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzp47" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.564197 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88mf7\" (UniqueName: \"kubernetes.io/projected/c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa-kube-api-access-88mf7\") pod \"machine-approver-56656f9798-49zfd\" (UID: \"c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-49zfd" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.571093 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-fwb5c"] Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.571181 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-xls64" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.587088 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfljn\" (UniqueName: \"kubernetes.io/projected/5c276957-366f-454e-bd11-de451d27e0be-kube-api-access-jfljn\") pod \"control-plane-machine-set-operator-78cbb6b69f-q58fj\" (UID: \"5c276957-366f-454e-bd11-de451d27e0be\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-q58fj" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.600624 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-pruner-29410560-9nw7n"] Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.601310 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcb7j\" (UniqueName: \"kubernetes.io/projected/7530965f-940a-4ac6-8dc6-be67d35a2f08-kube-api-access-wcb7j\") pod \"marketplace-operator-79b997595-dllbd\" (UID: \"7530965f-940a-4ac6-8dc6-be67d35a2f08\") " pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.621254 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b9s78"] Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.629019 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ksb4r\" (UniqueName: \"kubernetes.io/projected/7accf6cf-1c4a-40d7-80d9-5b4f92a46d53-kube-api-access-ksb4r\") pod \"collect-profiles-29410560-wj24r\" (UID: \"7accf6cf-1c4a-40d7-80d9-5b4f92a46d53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410560-wj24r" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.636656 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:41 crc kubenswrapper[4856]: E1202 00:08:41.636866 4856 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:42.136809047 +0000 UTC m=+149.163177051 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.636927 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: E1202 00:08:41.637503 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:42.137477544 +0000 UTC m=+149.163845538 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.738466 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:41 crc kubenswrapper[4856]: E1202 00:08:41.738631 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:42.238608424 +0000 UTC m=+149.264976428 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.739210 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: E1202 00:08:41.739518 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:42.239510047 +0000 UTC m=+149.265878051 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.758332 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzp47" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.762029 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2vc5h" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.769356 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-q58fj" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.776212 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.794333 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-pdb4m"] Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.796152 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zktnf"] Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.813250 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-49zfd" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.829064 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410560-wj24r" Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.841486 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:41 crc kubenswrapper[4856]: E1202 00:08:41.848065 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:42.341955831 +0000 UTC m=+149.368323845 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:41 crc kubenswrapper[4856]: I1202 00:08:41.943105 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:41 crc kubenswrapper[4856]: E1202 00:08:41.943718 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:42.443701726 +0000 UTC m=+149.470069730 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.046054 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:42 crc kubenswrapper[4856]: E1202 00:08:42.046511 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:42.546495629 +0000 UTC m=+149.572863633 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.122300 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z5psp" event={"ID":"f5b4c6d3-261d-4477-aac1-67034bf1a503","Type":"ContainerStarted","Data":"d7fa249a9f50b9d566dbfb0a740c0f41f418c6ff85c7ce046d17a88dcedeeb5b"} Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.132462 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-49zfd" event={"ID":"c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa","Type":"ContainerStarted","Data":"36190be156ccefdcce857a5e94a2820166d9a4f3101b191053b5145ab117fd86"} Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.135777 4856 generic.go:334] "Generic (PLEG): container finished" podID="11c6790a-0083-45a1-955d-af4fe38ac958" containerID="dd8144ee3bc126f43a38f6c20e2fc7c50f2ff285d5c47490d98a52069762308d" exitCode=0 Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.135815 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-4mgld" event={"ID":"11c6790a-0083-45a1-955d-af4fe38ac958","Type":"ContainerDied","Data":"dd8144ee3bc126f43a38f6c20e2fc7c50f2ff285d5c47490d98a52069762308d"} Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.137884 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-rkvl8" event={"ID":"cdee0db3-d504-4b9e-81d6-97bb134a3271","Type":"ContainerStarted","Data":"f33ba3723ed388921633ad489b8c35e785378d2eb7c3a84817198cb672750ee8"} Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.149646 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:42 crc kubenswrapper[4856]: E1202 00:08:42.150012 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:42.649996639 +0000 UTC m=+149.676364643 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.151458 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-s2rvp" event={"ID":"fa22af34-34f4-4fb8-8512-c5de1c159d63","Type":"ContainerStarted","Data":"92e5f976cb2fb23dbceedeab14320596751cf6f43d3d095256d2c5537f03f74a"} Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.151489 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-s2rvp" event={"ID":"fa22af34-34f4-4fb8-8512-c5de1c159d63","Type":"ContainerStarted","Data":"795543fb45521cdf52f337846193e736828f7a9a53bf24cd9fff6a3795c75d7b"} Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.152129 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-s2rvp" Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.160848 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" event={"ID":"53f12dd9-fe11-47df-9c75-d812a1a80309","Type":"ContainerStarted","Data":"35db8082a347b0917cd97e5b7caf2a16471ea1a77b981caa29225cd099d26d3b"} Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.176012 4856 generic.go:334] "Generic (PLEG): container finished" podID="34d7d362-ebe4-4476-8f52-646e974fb07d" containerID="5aa9513123fe69df5b997465f3796205b9fdd502c0c436e518b54d930fd071c2" exitCode=0 Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.176093 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-54k5c" event={"ID":"34d7d362-ebe4-4476-8f52-646e974fb07d","Type":"ContainerDied","Data":"5aa9513123fe69df5b997465f3796205b9fdd502c0c436e518b54d930fd071c2"} Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.176119 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-54k5c" event={"ID":"34d7d362-ebe4-4476-8f52-646e974fb07d","Type":"ContainerStarted","Data":"49a198b8b09bbf72e279cfe13ef02ee1ed493d3f541884b8c10ab00a82db1aec"} Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.182832 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zktnf" event={"ID":"eda44bb3-b2c2-468b-984d-88809371a6b7","Type":"ContainerStarted","Data":"3277889b7a2cf85a9412b4ad0a70fd27ee8c208b8a3b602404c3c453cdbb0b59"} Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.189926 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-4ztk7" event={"ID":"93046834-963c-4132-a184-d9541f761870","Type":"ContainerStarted","Data":"7618e51e73e0039b61c97a2d17d1be0f31367dc4a1d2480b536b0047fb758342"} Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.189959 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-4ztk7" 
event={"ID":"93046834-963c-4132-a184-d9541f761870","Type":"ContainerStarted","Data":"dfb4b55a9b473f59206c27249e35d80be627f8e1eb4db26675972f069fcbbad3"} Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.194123 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29410560-9nw7n" event={"ID":"ab1f3930-5ec8-49ad-844b-a6166d3ec3fb","Type":"ContainerStarted","Data":"921f02f31aa48256a1d45581e3b6fd0eeec92d3b6ad02cfe1a726a9d9b251c49"} Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.197375 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-5phk8" event={"ID":"819ff29c-f7f5-442f-8eb5-e7ccc25d2219","Type":"ContainerStarted","Data":"94760c88552ccfa08576e18c0055fb0a08868a8b8c5667e1fc0faf373497f0c3"} Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.205434 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-pdb4m" event={"ID":"760efad4-17b6-4a2c-8d36-68a59d2c60be","Type":"ContainerStarted","Data":"cfe93a6f36987bb9e7d0ddcfbb3161f52d09764470612de33b858c3765b11362"} Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.208799 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" event={"ID":"6cc6ab38-8ed6-468a-864c-25354ae45707","Type":"ContainerStarted","Data":"aa587bc089f6f4fd45e2d8f775fd3bee273c6fc2c7d6f61c057df7a25ee3a1a6"} Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.210061 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fwb5c" event={"ID":"f2c1346e-6e8d-4acb-b329-88bf72eaef2b","Type":"ContainerStarted","Data":"3809a7d0c317851e9b69fc7fb21035b86b6b85d609176da241dfb40150b705da"} Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.215184 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-s5rmh" event={"ID":"5b7f5674-184e-48f4-b455-239076effb38","Type":"ContainerStarted","Data":"81aa7b2024d73eb600f6c0c18db4c8b5f4730d1f1ea7ac0884812266602b5a8a"} Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.216808 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b9s78" event={"ID":"c6f67f96-77c7-473b-ae34-d0b5926ef9fa","Type":"ContainerStarted","Data":"e59ad53e541000085cbd99707c347b8f2f5b02465d22dd3240c458b51de8a1e6"} Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.227632 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l" event={"ID":"4fe59379-8ff6-4c3b-aa26-7e65a11d405b","Type":"ContainerStarted","Data":"75628dccd6f7bcb0f3ec21cc9608b03bdf5e67314cd53bb76cc2ae2120bd4cc4"} Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.227667 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l" event={"ID":"4fe59379-8ff6-4c3b-aa26-7e65a11d405b","Type":"ContainerStarted","Data":"a311675b19260da888c98ad6eede72071926e23d6e3768fe223b0baaffa24de5"} Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.229826 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l" Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.234219 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m8qvw" event={"ID":"4f7bfbc8-c98c-4600-99b6-ec82ba542c49","Type":"ContainerStarted","Data":"e81875441e62d85d6a3c71cdb3de705b1cf941196c870a80624bcb101691f871"} Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.250483 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:42 crc kubenswrapper[4856]: E1202 00:08:42.251995 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:42.75196098 +0000 UTC m=+149.778329014 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.274477 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l" Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.281381 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-6zlxx"] Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.352421 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:42 crc kubenswrapper[4856]: E1202 00:08:42.352713 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:42.852699661 +0000 UTC m=+149.879067665 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.453639 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:42 crc kubenswrapper[4856]: E1202 00:08:42.454112 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:42.954092187 +0000 UTC m=+149.980460191 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.524576 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-s2rvp" Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.558824 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:42 crc kubenswrapper[4856]: E1202 00:08:42.559074 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:43.059063935 +0000 UTC m=+150.085431939 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.661704 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:42 crc kubenswrapper[4856]: E1202 00:08:42.662028 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:43.162012941 +0000 UTC m=+150.188380945 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.763441 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:42 crc kubenswrapper[4856]: E1202 00:08:42.764212 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:43.264190968 +0000 UTC m=+150.290558972 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.770331 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-5phk8" Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.776437 4856 patch_prober.go:28] interesting pod/router-default-5444994796-5phk8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 00:08:42 crc kubenswrapper[4856]: [-]has-synced failed: reason withheld Dec 02 00:08:42 crc kubenswrapper[4856]: [+]process-running ok Dec 02 00:08:42 crc kubenswrapper[4856]: healthz check failed Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.776482 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5phk8" podUID="819ff29c-f7f5-442f-8eb5-e7ccc25d2219" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.833124 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ct88q"] Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.865330 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:42 crc kubenswrapper[4856]: E1202 00:08:42.865661 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:43.365617786 +0000 UTC m=+150.391985790 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.866303 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:42 crc kubenswrapper[4856]: E1202 00:08:42.866939 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:43.366928419 +0000 UTC m=+150.393296423 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.886900 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7s95v"] Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.897263 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-49wjq"] Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.900766 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dp8tx"] Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.913971 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ftzrj"] Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.916014 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-vstlv"] Dec 02 00:08:42 crc kubenswrapper[4856]: W1202 00:08:42.923823 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0f9c7450_45c7_4c5e_8b34_d128ee553a82.slice/crio-f78a76386de77bc18f9c2cb386066248d7583d7a3ec6634020bc1620a5e5b6f4 WatchSource:0}: Error finding container f78a76386de77bc18f9c2cb386066248d7583d7a3ec6634020bc1620a5e5b6f4: Status 404 returned error can't find the container with id f78a76386de77bc18f9c2cb386066248d7583d7a3ec6634020bc1620a5e5b6f4 Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.948378 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tj4lm"] Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.951681 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-wlphq"] Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.960175 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-t8qsp"] Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.976332 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:42 crc kubenswrapper[4856]: E1202 00:08:42.977001 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:43.476986036 +0000 UTC m=+150.503354040 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.979946 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-rlmv8"] Dec 02 00:08:42 crc kubenswrapper[4856]: I1202 00:08:42.994464 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2vztf"] Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.006754 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-89qzj"] Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.030329 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-sdthq"] Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.052225 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-6phhd"] Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.053525 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzp47"] Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.074463 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-xls64"] Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.079461 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:43 crc kubenswrapper[4856]: E1202 00:08:43.080045 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-02 00:08:43.580024275 +0000 UTC m=+150.606392369 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.090700 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-z6j5r"] Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.163411 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l" podStartSLOduration=130.163386653 podStartE2EDuration="2m10.163386653s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:43.162719696 +0000 UTC m=+150.189087700" watchObservedRunningTime="2025-12-02 00:08:43.163386653 +0000 UTC m=+150.189754657" Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.177076 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dllbd"] Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.183233 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:43 crc kubenswrapper[4856]: E1202 00:08:43.183623 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:43.683605777 +0000 UTC m=+150.709973771 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.203778 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-s2rvp" podStartSLOduration=131.203758829 podStartE2EDuration="2m11.203758829s" podCreationTimestamp="2025-12-02 00:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:43.199669295 +0000 UTC m=+150.226037289" watchObservedRunningTime="2025-12-02 00:08:43.203758829 +0000 UTC m=+150.230126833" Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.223923 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2vc5h"] Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.230297 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410560-wj24r"] Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.237519 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-q58fj"] Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.243188 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-5phk8" podStartSLOduration=130.243173151 podStartE2EDuration="2m10.243173151s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:43.242225437 +0000 UTC m=+150.268593441" watchObservedRunningTime="2025-12-02 00:08:43.243173151 +0000 UTC m=+150.269541155" Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.251330 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-4ztk7" event={"ID":"93046834-963c-4132-a184-d9541f761870","Type":"ContainerStarted","Data":"65fbf4ba1021ff166ae8d1105c493627356373fd7e183e266a05e30d76413bf2"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.263183 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-89qzj" event={"ID":"7ecc82d8-17ea-4639-a4cf-2dea25574574","Type":"ContainerStarted","Data":"200e0a9cade0380f17c69850aabdcfcf9d40f4dfe8dee094c6d066961164efdf"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.277379 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-4r66c"] Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.280757 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m8qvw" event={"ID":"4f7bfbc8-c98c-4600-99b6-ec82ba542c49","Type":"ContainerStarted","Data":"ea0102d854b265a4e1a17c69e40360627d0dad6a28dc663e404fd100758717b2"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.280799 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m8qvw" event={"ID":"4f7bfbc8-c98c-4600-99b6-ec82ba542c49","Type":"ContainerStarted","Data":"e11282240b9df07c34bb016dec5e4adc639397dd7c9256e1d699f13cbe49402f"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.286016 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:43 crc kubenswrapper[4856]: E1202 00:08:43.286280 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:43.786268126 +0000 UTC m=+150.812636120 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.288265 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" event={"ID":"6cc6ab38-8ed6-468a-864c-25354ae45707","Type":"ContainerStarted","Data":"7050b9d2c2f6f25e2de25d185625ad6a56ffc565a23852375e46adbbeac732fa"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.289280 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.292926 4856 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-7lsm8 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.5:6443/healthz\": dial tcp 10.217.0.5:6443: connect: connection refused" start-of-body= Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.292974 4856 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" podUID="6cc6ab38-8ed6-468a-864c-25354ae45707" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.5:6443/healthz\": dial tcp 10.217.0.5:6443: connect: connection refused" Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.294132 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzp47" event={"ID":"08442f50-cb64-4c0a-a8c6-ecd34ad7aa38","Type":"ContainerStarted","Data":"4dcd8d3a608794514d38455bf4edb88ad432bf4f3d21764e05b3f6a3feb5e16c"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.308636 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-s5rmh" event={"ID":"5b7f5674-184e-48f4-b455-239076effb38","Type":"ContainerStarted","Data":"019e28ebdda495c7bc37e3f67c29bf842056fc8509a1de23495600881644cac4"} Dec 02 00:08:43 crc kubenswrapper[4856]: W1202 00:08:43.310364 4856 manager.go:1169] Failed to process watch 
event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5c276957_366f_454e_bd11_de451d27e0be.slice/crio-9388a2159b96410c3e6475936d4216a5df0f068789fb93b17324ef247ee2daaf WatchSource:0}: Error finding container 9388a2159b96410c3e6475936d4216a5df0f068789fb93b17324ef247ee2daaf: Status 404 returned error can't find the container with id 9388a2159b96410c3e6475936d4216a5df0f068789fb93b17324ef247ee2daaf Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.313225 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-6zlxx" event={"ID":"23476e03-4f2c-426e-a180-9ad48a56d758","Type":"ContainerStarted","Data":"9542a453699fc47ca47f1d488e9f1153db2f8178395ec8b548d70d9b4edb28b8"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.313256 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-6zlxx" event={"ID":"23476e03-4f2c-426e-a180-9ad48a56d758","Type":"ContainerStarted","Data":"a30d3daacd81c85bc489aa329c5967be631a94488b582eb62d7bee8fec1ca5ff"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.315255 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" event={"ID":"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d","Type":"ContainerStarted","Data":"2b96c664b7d3b8a7b0b38bde1875526f197644c915e5a22e33cd4f20c9be9847"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.337696 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-rkvl8" podStartSLOduration=131.337675273 podStartE2EDuration="2m11.337675273s" podCreationTimestamp="2025-12-02 00:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:43.336027671 +0000 UTC m=+150.362395665" watchObservedRunningTime="2025-12-02 00:08:43.337675273 +0000 UTC m=+150.364043287" Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.359909 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-rlmv8" event={"ID":"1471be81-ab52-4a5c-b350-115301184a17","Type":"ContainerStarted","Data":"ffd5bd4c1d2e3fec8479ec7df0a418e487b274dea7b848e21a6057700820a8fb"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.390120 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:43 crc kubenswrapper[4856]: E1202 00:08:43.391420 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:43.891402838 +0000 UTC m=+150.917770842 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.412079 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-pdb4m" event={"ID":"760efad4-17b6-4a2c-8d36-68a59d2c60be","Type":"ContainerStarted","Data":"943d451611304f86e9c3ce285133ec91949e92cb129ce4f2664f79f5568f7cd9"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.454840 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-49zfd" event={"ID":"c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa","Type":"ContainerStarted","Data":"16893ab30eb4c25181f2a28bd483599b95a9a4593aafd32036d361fe32bcce28"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.498823 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-4ztk7" podStartSLOduration=130.498804757 podStartE2EDuration="2m10.498804757s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:43.439165202 +0000 UTC m=+150.465533206" watchObservedRunningTime="2025-12-02 00:08:43.498804757 +0000 UTC m=+150.525172761" Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.505965 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:43 crc kubenswrapper[4856]: E1202 00:08:43.506316 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:44.006270037 +0000 UTC m=+151.032638041 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.516968 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-z6j5r" event={"ID":"96c76858-8564-4349-9f31-936cf700a541","Type":"ContainerStarted","Data":"11a7f27718340abff4bb16a0fcecee68e01458f637261b230a66373a2c5a6814"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.537794 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-s5rmh" podStartSLOduration=5.537779458 podStartE2EDuration="5.537779458s" podCreationTimestamp="2025-12-02 00:08:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:43.536156047 +0000 UTC m=+150.562524051" watchObservedRunningTime="2025-12-02 00:08:43.537779458 +0000 UTC m=+150.564147452" Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.538395 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m8qvw" podStartSLOduration=130.538390003 podStartE2EDuration="2m10.538390003s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:43.501211409 +0000 UTC m=+150.527579413" watchObservedRunningTime="2025-12-02 00:08:43.538390003 +0000 UTC m=+150.564758007" Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.555902 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-4mgld" event={"ID":"11c6790a-0083-45a1-955d-af4fe38ac958","Type":"ContainerStarted","Data":"316f24a25488cb272d17eb0d0f5e51996c81ce709520804fc4a1848d8fb86948"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.556495 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-4mgld" event={"ID":"11c6790a-0083-45a1-955d-af4fe38ac958","Type":"ContainerStarted","Data":"9db1a183e014860fd16bab0fa6ee491388897312175a6ea4e7ddae0ab87ba908"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.575854 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" podStartSLOduration=131.575833645 podStartE2EDuration="2m11.575833645s" podCreationTimestamp="2025-12-02 00:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:43.574161882 +0000 UTC m=+150.600529886" watchObservedRunningTime="2025-12-02 00:08:43.575833645 +0000 UTC m=+150.602201639" Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.576391 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-sdthq" event={"ID":"cc0b2623-e7d2-4497-b688-977a23f8d922","Type":"ContainerStarted","Data":"508ea58807b516a93d55ac996628c4008b08dc926d03f20a68159f95b750a353"} Dec 02 
00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.601196 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-6zlxx" podStartSLOduration=131.601169769 podStartE2EDuration="2m11.601169769s" podCreationTimestamp="2025-12-02 00:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:43.597043544 +0000 UTC m=+150.623411558" watchObservedRunningTime="2025-12-02 00:08:43.601169769 +0000 UTC m=+150.627537773" Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.610085 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:43 crc kubenswrapper[4856]: E1202 00:08:43.611065 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:44.11104704 +0000 UTC m=+151.137415044 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.611341 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zktnf" event={"ID":"eda44bb3-b2c2-468b-984d-88809371a6b7","Type":"ContainerStarted","Data":"9686103f90653059d18837a8e1611bd6f79e9b8d8c8e28081e1ed481a0d7c6da"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.679055 4856 generic.go:334] "Generic (PLEG): container finished" podID="53f12dd9-fe11-47df-9c75-d812a1a80309" containerID="27854c20ac76347a571df5d1d64d0833de73eecad444cdfc960d3bed3eae009c" exitCode=0 Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.679150 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" event={"ID":"53f12dd9-fe11-47df-9c75-d812a1a80309","Type":"ContainerDied","Data":"27854c20ac76347a571df5d1d64d0833de73eecad444cdfc960d3bed3eae009c"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.711161 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29410560-9nw7n" event={"ID":"ab1f3930-5ec8-49ad-844b-a6166d3ec3fb","Type":"ContainerStarted","Data":"b76d19ac2e0cc065d560c2b6ed7708545d8363be7b79d92cc46817619a731ad9"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.711282 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 
00:08:43 crc kubenswrapper[4856]: E1202 00:08:43.712438 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:44.212426796 +0000 UTC m=+151.238794800 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.716974 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ftzrj" event={"ID":"71754574-f7cd-4f47-916a-efd57a69e4ad","Type":"ContainerStarted","Data":"94565fc2c05c2126475438462646e0957c92dfc9bc3c9a52bd1a57594d85e8e3"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.736939 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-4mgld" podStartSLOduration=131.736926059 podStartE2EDuration="2m11.736926059s" podCreationTimestamp="2025-12-02 00:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:43.736155649 +0000 UTC m=+150.762523653" watchObservedRunningTime="2025-12-02 00:08:43.736926059 +0000 UTC m=+150.763294063" Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.737056 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-pdb4m" podStartSLOduration=130.737051892 podStartE2EDuration="2m10.737051892s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:43.656157146 +0000 UTC m=+150.682525150" watchObservedRunningTime="2025-12-02 00:08:43.737051892 +0000 UTC m=+150.763419896" Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.740808 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tj4lm" event={"ID":"f4c09d66-5b48-47b0-9696-4380fcc8edf3","Type":"ContainerStarted","Data":"0217aab757f61dec3113dd07e137a800b2ca256693d8e41e1b83ffde95f8408e"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.763755 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-xls64" event={"ID":"a753f707-5591-486d-b36c-217cdf0ae9df","Type":"ContainerStarted","Data":"fe8f1b4ca0b025273aa44619bee8b8e3f7098e0ca83ebfd4ae7b08e99a8a79cd"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.780868 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-wlphq" event={"ID":"ffb4b3e1-a0b9-40fc-9b6f-f34549a866b3","Type":"ContainerStarted","Data":"e3c1b79887646f092019176e57460411667081f55d197366758ab4ba26989fd1"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.784714 4856 patch_prober.go:28] interesting pod/router-default-5444994796-5phk8 container/router namespace/openshift-ingress: Startup probe 
status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 00:08:43 crc kubenswrapper[4856]: [-]has-synced failed: reason withheld Dec 02 00:08:43 crc kubenswrapper[4856]: [+]process-running ok Dec 02 00:08:43 crc kubenswrapper[4856]: healthz check failed Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.784746 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5phk8" podUID="819ff29c-f7f5-442f-8eb5-e7ccc25d2219" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.787768 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7s95v" event={"ID":"782abbf8-ce88-46c6-bef2-708bca62dd57","Type":"ContainerStarted","Data":"2776a6f53f2207502cae57440df878095032d09afaa11985ef9efe86da40b455"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.792074 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-t8qsp" event={"ID":"263d4630-69c0-4f10-a3c6-7f88b7836533","Type":"ContainerStarted","Data":"6127111ac43e33787570f2fd03181c8891e791d86f3c136c843eb018084ce00b"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.809950 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-pruner-29410560-9nw7n" podStartSLOduration=131.809938134 podStartE2EDuration="2m11.809938134s" podCreationTimestamp="2025-12-02 00:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:43.808999221 +0000 UTC m=+150.835367225" watchObservedRunningTime="2025-12-02 00:08:43.809938134 +0000 UTC m=+150.836306138" Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.812381 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.813398 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b9s78" event={"ID":"c6f67f96-77c7-473b-ae34-d0b5926ef9fa","Type":"ContainerStarted","Data":"444ccb3924dc4a790b303829f0ef1241dafa08313778c45a05bac2099e62668f"} Dec 02 00:08:43 crc kubenswrapper[4856]: E1202 00:08:43.815438 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:44.315417634 +0000 UTC m=+151.341785638 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.816670 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-49wjq" event={"ID":"9c4c1d11-deb4-4971-bf3e-768b4b30ee6a","Type":"ContainerStarted","Data":"2e6437649e2ae202973978a9873d137165a157e38f0e652f0240f612355a79b4"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.858402 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fwb5c" event={"ID":"f2c1346e-6e8d-4acb-b329-88bf72eaef2b","Type":"ContainerStarted","Data":"67ee9ef9febfcf328883ffea6ad0187c1bcb0c85313a96b83d748e78f2753ec9"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.858449 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fwb5c" event={"ID":"f2c1346e-6e8d-4acb-b329-88bf72eaef2b","Type":"ContainerStarted","Data":"a80d7bc001024e5ffc9a2a8bfd114a20eb1133318e9b3a94b169a57262b925c3"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.861482 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6phhd" event={"ID":"44d156b7-bec9-4cf2-8a71-91c94a4db280","Type":"ContainerStarted","Data":"f63a23f9dd0b1bb59d5481433fee9b4f0678c02aff6f9a50136eb49cf909d1ad"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.873262 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-54k5c" event={"ID":"34d7d362-ebe4-4476-8f52-646e974fb07d","Type":"ContainerStarted","Data":"afdd90f315f5a88373ae676ccffe675803b3af16e74c84ca8788093112431a68"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.873963 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-54k5c" Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.880962 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zktnf" podStartSLOduration=130.880947819 podStartE2EDuration="2m10.880947819s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:43.839543817 +0000 UTC m=+150.865911821" watchObservedRunningTime="2025-12-02 00:08:43.880947819 +0000 UTC m=+150.907315823" Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.896505 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ct88q" event={"ID":"0f9c7450-45c7-4c5e-8b34-d128ee553a82","Type":"ContainerStarted","Data":"f78a76386de77bc18f9c2cb386066248d7583d7a3ec6634020bc1620a5e5b6f4"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.898387 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2vztf" 
event={"ID":"8247592b-2d26-494b-a8ed-d4c1e052f7a6","Type":"ContainerStarted","Data":"1d8888a0ceb4aa6fcdee003ec105d51c52e294fcf3cb54b131f2d129b7ff23b2"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.900576 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chhj4" event={"ID":"6cc9abf3-4d0e-49ea-b140-b38a97769d4d","Type":"ContainerStarted","Data":"a194746e2d5b832a4af47a83e41ae7b435389e39f35085c76d82ed3954796561"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.900611 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chhj4" event={"ID":"6cc9abf3-4d0e-49ea-b140-b38a97769d4d","Type":"ContainerStarted","Data":"3d43d264397d3c8daebc3cfc83af83723b439a0d81fcb8597c4301f751ff32a7"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.901312 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-vstlv" event={"ID":"ecbd556c-e5af-4f34-8351-ef9ff3416abe","Type":"ContainerStarted","Data":"e1b9a9795a5ea3d46f2b5b9c1f872b60cafb21e6b01dd470ebc226373ba60f35"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.903079 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z5psp" event={"ID":"f5b4c6d3-261d-4477-aac1-67034bf1a503","Type":"ContainerStarted","Data":"96a38c779675e25d3c484886506d7bebeaf4f911aa357e79dc4e32e6e07d486d"} Dec 02 00:08:43 crc kubenswrapper[4856]: I1202 00:08:43.915734 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:43 crc kubenswrapper[4856]: E1202 00:08:43.916453 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:44.416434611 +0000 UTC m=+151.442802615 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:44 crc kubenswrapper[4856]: I1202 00:08:44.018326 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:44 crc kubenswrapper[4856]: E1202 00:08:44.019575 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-02 00:08:44.519560672 +0000 UTC m=+151.545928676 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:44 crc kubenswrapper[4856]: I1202 00:08:44.046340 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-b9s78" podStartSLOduration=132.046323032 podStartE2EDuration="2m12.046323032s" podCreationTimestamp="2025-12-02 00:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:44.044889065 +0000 UTC m=+151.071257069" watchObservedRunningTime="2025-12-02 00:08:44.046323032 +0000 UTC m=+151.072691036" Dec 02 00:08:44 crc kubenswrapper[4856]: I1202 00:08:44.120825 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:44 crc kubenswrapper[4856]: E1202 00:08:44.121177 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:44.621163194 +0000 UTC m=+151.647531188 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:44 crc kubenswrapper[4856]: I1202 00:08:44.225216 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:44 crc kubenswrapper[4856]: E1202 00:08:44.225514 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:44.725465485 +0000 UTC m=+151.751833489 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:44 crc kubenswrapper[4856]: I1202 00:08:44.225868 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:44 crc kubenswrapper[4856]: E1202 00:08:44.226302 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:44.726284285 +0000 UTC m=+151.752652289 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:44 crc kubenswrapper[4856]: I1202 00:08:44.327390 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:44 crc kubenswrapper[4856]: E1202 00:08:44.327746 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:44.827717333 +0000 UTC m=+151.854085337 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:44 crc kubenswrapper[4856]: I1202 00:08:44.428461 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:44 crc kubenswrapper[4856]: E1202 00:08:44.429087 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:44.929074949 +0000 UTC m=+151.955442953 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:44 crc kubenswrapper[4856]: I1202 00:08:44.530666 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:44 crc kubenswrapper[4856]: E1202 00:08:44.531018 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:45.030958528 +0000 UTC m=+152.057326522 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:44 crc kubenswrapper[4856]: I1202 00:08:44.531433 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:44 crc kubenswrapper[4856]: E1202 00:08:44.531861 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:45.031841911 +0000 UTC m=+152.058210095 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:44 crc kubenswrapper[4856]: I1202 00:08:44.634379 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:44 crc kubenswrapper[4856]: E1202 00:08:44.634756 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:45.134736486 +0000 UTC m=+152.161104490 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:44 crc kubenswrapper[4856]: I1202 00:08:44.640277 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ct88q" podStartSLOduration=131.640254656 podStartE2EDuration="2m11.640254656s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:44.602899687 +0000 UTC m=+151.629267691" watchObservedRunningTime="2025-12-02 00:08:44.640254656 +0000 UTC m=+151.666622660" Dec 02 00:08:44 crc kubenswrapper[4856]: I1202 00:08:44.693348 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-fwb5c" podStartSLOduration=131.693313184 podStartE2EDuration="2m11.693313184s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:44.640518773 +0000 UTC m=+151.666886777" watchObservedRunningTime="2025-12-02 00:08:44.693313184 +0000 UTC m=+151.719681188" Dec 02 00:08:44 crc kubenswrapper[4856]: I1202 00:08:44.694200 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z5psp" podStartSLOduration=131.694195597 podStartE2EDuration="2m11.694195597s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:44.693984221 +0000 UTC m=+151.720352225" watchObservedRunningTime="2025-12-02 00:08:44.694195597 +0000 UTC m=+151.720563601" Dec 02 00:08:44 crc kubenswrapper[4856]: I1202 00:08:44.736225 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:44 crc kubenswrapper[4856]: E1202 00:08:44.736732 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:45.236716787 +0000 UTC m=+152.263084791 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:44 crc kubenswrapper[4856]: I1202 00:08:44.753467 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-54k5c" podStartSLOduration=132.753446773 podStartE2EDuration="2m12.753446773s" podCreationTimestamp="2025-12-02 00:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:44.75177657 +0000 UTC m=+151.778144594" watchObservedRunningTime="2025-12-02 00:08:44.753446773 +0000 UTC m=+151.779814767" Dec 02 00:08:44 crc kubenswrapper[4856]: I1202 00:08:44.797650 4856 patch_prober.go:28] interesting pod/router-default-5444994796-5phk8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 00:08:44 crc kubenswrapper[4856]: [-]has-synced failed: reason withheld Dec 02 00:08:44 crc kubenswrapper[4856]: [+]process-running ok Dec 02 00:08:44 crc kubenswrapper[4856]: healthz check failed Dec 02 00:08:44 crc kubenswrapper[4856]: I1202 00:08:44.797708 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5phk8" podUID="819ff29c-f7f5-442f-8eb5-e7ccc25d2219" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 00:08:44 crc kubenswrapper[4856]: I1202 00:08:44.837125 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:44 crc kubenswrapper[4856]: E1202 00:08:44.837308 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:45.337281893 +0000 UTC m=+152.363649897 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:44 crc kubenswrapper[4856]: I1202 00:08:44.837405 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:44 crc kubenswrapper[4856]: E1202 00:08:44.837809 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:45.337799676 +0000 UTC m=+152.364167670 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:44 crc kubenswrapper[4856]: I1202 00:08:44.942443 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:44 crc kubenswrapper[4856]: E1202 00:08:44.942830 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:45.442802324 +0000 UTC m=+152.469170328 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:44 crc kubenswrapper[4856]: I1202 00:08:44.943043 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:44 crc kubenswrapper[4856]: E1202 00:08:44.943383 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:45.443365938 +0000 UTC m=+152.469733942 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.007110 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2vc5h" event={"ID":"5fc26b3d-7f39-4821-a9f6-eafe39f3c335","Type":"ContainerStarted","Data":"09d0e083bced971b51c9109b5836e3a37240af230ef244b78322f8a3b268e966"} Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.033612 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-q58fj" event={"ID":"5c276957-366f-454e-bd11-de451d27e0be","Type":"ContainerStarted","Data":"76ad12d22f08902dd84bcbc9b72a6019e4caff1615d6686b14d27f32f4bdcdee"} Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.033655 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-q58fj" event={"ID":"5c276957-366f-454e-bd11-de451d27e0be","Type":"ContainerStarted","Data":"9388a2159b96410c3e6475936d4216a5df0f068789fb93b17324ef247ee2daaf"} Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.044836 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:45 crc kubenswrapper[4856]: E1202 00:08:45.045112 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:45.545097163 +0000 UTC m=+152.571465167 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.062451 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-q58fj" podStartSLOduration=132.062435484 podStartE2EDuration="2m12.062435484s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:45.062100236 +0000 UTC m=+152.088468240" watchObservedRunningTime="2025-12-02 00:08:45.062435484 +0000 UTC m=+152.088803488" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.063920 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2vztf" event={"ID":"8247592b-2d26-494b-a8ed-d4c1e052f7a6","Type":"ContainerStarted","Data":"f93b6d89df68e1f59edbfe15c61de9cf8a0fa967644725e49c400f93b7ebf4e2"} Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.108008 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-rlmv8" event={"ID":"1471be81-ab52-4a5c-b350-115301184a17","Type":"ContainerStarted","Data":"6ce8a4c25db18b78cc13551cfc590361d750b3cde6282cce812a1e975542d315"} Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.147276 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:45 crc kubenswrapper[4856]: E1202 00:08:45.149174 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:45.649155928 +0000 UTC m=+152.675523932 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.149987 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-rlmv8" podStartSLOduration=132.149960318 podStartE2EDuration="2m12.149960318s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:45.143242348 +0000 UTC m=+152.169610362" watchObservedRunningTime="2025-12-02 00:08:45.149960318 +0000 UTC m=+152.176328322" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.168474 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-89qzj" event={"ID":"7ecc82d8-17ea-4639-a4cf-2dea25574574","Type":"ContainerStarted","Data":"7ed331f6dbef615dd0d0a943b4f204b695adb823a84fce2b1d0ee927ea8d1d67"} Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.168901 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-89qzj" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.184765 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chhj4" event={"ID":"6cc9abf3-4d0e-49ea-b140-b38a97769d4d","Type":"ContainerStarted","Data":"cfcd9dae5d3c4bc9cfa56e453b58081871e1ca84ec925233600f708cbf64bd96"} Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.184852 4856 patch_prober.go:28] interesting pod/downloads-7954f5f757-89qzj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" start-of-body= Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.189822 4856 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-89qzj" podUID="7ecc82d8-17ea-4639-a4cf-2dea25574574" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.212084 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-89qzj" podStartSLOduration=133.212067987 podStartE2EDuration="2m13.212067987s" podCreationTimestamp="2025-12-02 00:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:45.21100274 +0000 UTC m=+152.237370734" watchObservedRunningTime="2025-12-02 00:08:45.212067987 +0000 UTC m=+152.238435991" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.214370 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" event={"ID":"7530965f-940a-4ac6-8dc6-be67d35a2f08","Type":"ContainerStarted","Data":"7ecedc68031c6f576af14cc863dd7c89663fee5d5e039e47542d11f5c383272c"} Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 
00:08:45.214428 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" event={"ID":"7530965f-940a-4ac6-8dc6-be67d35a2f08","Type":"ContainerStarted","Data":"d0f8f8885ba2ee64eb2cb6b36df0a3f4a753f36f83b64e41be5932d0c4bea550"} Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.214732 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.236886 4856 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-dllbd container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.40:8080/healthz\": dial tcp 10.217.0.40:8080: connect: connection refused" start-of-body= Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.236937 4856 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" podUID="7530965f-940a-4ac6-8dc6-be67d35a2f08" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.40:8080/healthz\": dial tcp 10.217.0.40:8080: connect: connection refused" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.246833 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-chhj4" podStartSLOduration=133.24681878 podStartE2EDuration="2m13.24681878s" podCreationTimestamp="2025-12-02 00:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:45.245417174 +0000 UTC m=+152.271785178" watchObservedRunningTime="2025-12-02 00:08:45.24681878 +0000 UTC m=+152.273186784" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.248289 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:45 crc kubenswrapper[4856]: E1202 00:08:45.249737 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:45.749704673 +0000 UTC m=+152.776072677 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.296719 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" podStartSLOduration=132.296700468 podStartE2EDuration="2m12.296700468s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:45.293968398 +0000 UTC m=+152.320336402" watchObservedRunningTime="2025-12-02 00:08:45.296700468 +0000 UTC m=+152.323068472" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.328624 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ct88q" event={"ID":"0f9c7450-45c7-4c5e-8b34-d128ee553a82","Type":"ContainerStarted","Data":"0162fe6977dc0d132995b6dc20423447450d8a4451bb7199b613f486475817af"} Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.342638 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-vstlv" event={"ID":"ecbd556c-e5af-4f34-8351-ef9ff3416abe","Type":"ContainerStarted","Data":"268236b9c1277a7e20a8f46da5470ab54aff8573fa3ff22c505759189dddebdc"} Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.350663 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:45 crc kubenswrapper[4856]: E1202 00:08:45.353097 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:45.85307083 +0000 UTC m=+152.879438834 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.371862 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-z6j5r" event={"ID":"96c76858-8564-4349-9f31-936cf700a541","Type":"ContainerStarted","Data":"c67a7bb62e8fdf1a7786bd044354b6d716c8d3b1b1d095adbbbbcee65f960033"} Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.396894 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410560-wj24r" event={"ID":"7accf6cf-1c4a-40d7-80d9-5b4f92a46d53","Type":"ContainerStarted","Data":"d0f394787664799ef9d11ce36c295536299e694d38c2f46be3fbdbcee703c051"} Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.424833 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ftzrj" event={"ID":"71754574-f7cd-4f47-916a-efd57a69e4ad","Type":"ContainerStarted","Data":"888a98d2422120651cb8fe7016e056431aa5e337c35452d93c2c04adbbd842cf"} Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.449194 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-wlphq" event={"ID":"ffb4b3e1-a0b9-40fc-9b6f-f34549a866b3","Type":"ContainerStarted","Data":"4698805b0bbfa539a850b9bb44a4fc1eb5546eb0d4d856c1a469f9ca1029dee0"} Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.452460 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:45 crc kubenswrapper[4856]: E1202 00:08:45.453836 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:45.953582105 +0000 UTC m=+152.979950109 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.502635 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-z6j5r" podStartSLOduration=132.502615671 podStartE2EDuration="2m12.502615671s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:45.50101903 +0000 UTC m=+152.527387024" watchObservedRunningTime="2025-12-02 00:08:45.502615671 +0000 UTC m=+152.528983675" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.503421 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7s95v" event={"ID":"782abbf8-ce88-46c6-bef2-708bca62dd57","Type":"ContainerStarted","Data":"27f8f0781fd2165ba42ecacb2de1e12fecc71dd55c3e51e62a464a01a9523429"} Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.512597 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6phhd" event={"ID":"44d156b7-bec9-4cf2-8a71-91c94a4db280","Type":"ContainerStarted","Data":"c00c2d3801fcc78afbe179159188d0bc30f803f17f2a94001c74673c4f67b45a"} Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.522283 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-sdthq" event={"ID":"cc0b2623-e7d2-4497-b688-977a23f8d922","Type":"ContainerStarted","Data":"902e67bdcb1f63f4e523743e9a9abdea07dc951dcee452ec705efc06dc972d3f"} Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.524076 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.524742 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.525973 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-4r66c" event={"ID":"3c80eeea-e581-44bc-b901-10fb429f27e3","Type":"ContainerStarted","Data":"c63b390ad3af434d007d1ef006c408f881a5408daf80faee1bbcd2acaaec0cb7"} Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.526017 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-4r66c" event={"ID":"3c80eeea-e581-44bc-b901-10fb429f27e3","Type":"ContainerStarted","Data":"e065466c48ac5a7f95e038173a2de32a06f338d68641effedc285af4bce2114b"} Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.533692 4856 patch_prober.go:28] interesting pod/apiserver-76f77b778f-4mgld container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 02 00:08:45 crc kubenswrapper[4856]: [+]log ok Dec 02 00:08:45 crc kubenswrapper[4856]: [+]etcd ok Dec 02 00:08:45 crc kubenswrapper[4856]: 
[+]poststarthook/start-apiserver-admission-initializer ok Dec 02 00:08:45 crc kubenswrapper[4856]: [+]poststarthook/generic-apiserver-start-informers ok Dec 02 00:08:45 crc kubenswrapper[4856]: [+]poststarthook/max-in-flight-filter ok Dec 02 00:08:45 crc kubenswrapper[4856]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 02 00:08:45 crc kubenswrapper[4856]: [+]poststarthook/image.openshift.io-apiserver-caches ok Dec 02 00:08:45 crc kubenswrapper[4856]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Dec 02 00:08:45 crc kubenswrapper[4856]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Dec 02 00:08:45 crc kubenswrapper[4856]: [+]poststarthook/project.openshift.io-projectcache ok Dec 02 00:08:45 crc kubenswrapper[4856]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Dec 02 00:08:45 crc kubenswrapper[4856]: [+]poststarthook/openshift.io-startinformers ok Dec 02 00:08:45 crc kubenswrapper[4856]: [+]poststarthook/openshift.io-restmapperupdater ok Dec 02 00:08:45 crc kubenswrapper[4856]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 02 00:08:45 crc kubenswrapper[4856]: livez check failed Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.533745 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-4mgld" podUID="11c6790a-0083-45a1-955d-af4fe38ac958" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.555227 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:45 crc kubenswrapper[4856]: E1202 00:08:45.556281 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:46.056267234 +0000 UTC m=+153.082635358 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.568292 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzp47" event={"ID":"08442f50-cb64-4c0a-a8c6-ecd34ad7aa38","Type":"ContainerStarted","Data":"3d0fb7d0c143ea1ad2330fb5b66666d67664f7e31c9b31a86e304a532336a042"} Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.569518 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzp47" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.591841 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-t8qsp" event={"ID":"263d4630-69c0-4f10-a3c6-7f88b7836533","Type":"ContainerStarted","Data":"299e32011769e79eb7e8b6077ff88d60d41a739e4b12db4bd5b6f2ae8a8783e7"} Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.596945 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzp47" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.612019 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tj4lm" event={"ID":"f4c09d66-5b48-47b0-9696-4380fcc8edf3","Type":"ContainerStarted","Data":"b4af9ed24abeaec24806b21d581f0cd7360c51f248271e9bee679d362a39ce60"} Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.612845 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tj4lm" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.623898 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tj4lm" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.628656 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-wlphq" podStartSLOduration=132.628640744 podStartE2EDuration="2m12.628640744s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:45.5655301 +0000 UTC m=+152.591898104" watchObservedRunningTime="2025-12-02 00:08:45.628640744 +0000 UTC m=+152.655008748" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.629378 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29410560-wj24r" podStartSLOduration=132.629375282 podStartE2EDuration="2m12.629375282s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:45.627791972 +0000 UTC m=+152.654159966" watchObservedRunningTime="2025-12-02 00:08:45.629375282 +0000 UTC m=+152.655743286" Dec 02 
00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.638948 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" event={"ID":"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d","Type":"ContainerStarted","Data":"259caaa69f6c0c8e3d22b7ec3931bdfa9e3a7619c877140f5bfa4671501e29f9"} Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.639989 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.648390 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.658664 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:45 crc kubenswrapper[4856]: E1202 00:08:45.660451 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:46.160431931 +0000 UTC m=+153.186799935 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.662611 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-49zfd" event={"ID":"c07b10f2-4b70-4a76-9fbb-45e1a6f4d5aa","Type":"ContainerStarted","Data":"2863cba1f3aa8847113f68499cd21bcfbd574934212cca9d5daa9be630ab7c67"} Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.665602 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-49wjq" event={"ID":"9c4c1d11-deb4-4971-bf3e-768b4b30ee6a","Type":"ContainerStarted","Data":"7206dbc5cf29afd4e3f605b4649d3c0dca56cec2373772d898ac60e03e52754f"} Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.690917 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-54k5c" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.699549 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.764068 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-49zfd" podStartSLOduration=133.764050355 podStartE2EDuration="2m13.764050355s" podCreationTimestamp="2025-12-02 00:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 
00:08:45.763931402 +0000 UTC m=+152.790299416" watchObservedRunningTime="2025-12-02 00:08:45.764050355 +0000 UTC m=+152.790418359" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.765391 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ftzrj" podStartSLOduration=132.765385789 podStartE2EDuration="2m12.765385789s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:45.720865107 +0000 UTC m=+152.747233111" watchObservedRunningTime="2025-12-02 00:08:45.765385789 +0000 UTC m=+152.791753793" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.765450 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:45 crc kubenswrapper[4856]: E1202 00:08:45.770207 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:46.270193611 +0000 UTC m=+153.296561615 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.781815 4856 patch_prober.go:28] interesting pod/router-default-5444994796-5phk8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 00:08:45 crc kubenswrapper[4856]: [-]has-synced failed: reason withheld Dec 02 00:08:45 crc kubenswrapper[4856]: [+]process-running ok Dec 02 00:08:45 crc kubenswrapper[4856]: healthz check failed Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.781901 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5phk8" podUID="819ff29c-f7f5-442f-8eb5-e7ccc25d2219" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.868944 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:45 crc kubenswrapper[4856]: E1202 00:08:45.869290 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-02 00:08:46.369272659 +0000 UTC m=+153.395640663 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.869581 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" podStartSLOduration=132.869560646 podStartE2EDuration="2m12.869560646s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:45.863073221 +0000 UTC m=+152.889441225" watchObservedRunningTime="2025-12-02 00:08:45.869560646 +0000 UTC m=+152.895928650" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.886923 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6phhd" podStartSLOduration=132.886903847 podStartE2EDuration="2m12.886903847s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:45.836332872 +0000 UTC m=+152.862700876" watchObservedRunningTime="2025-12-02 00:08:45.886903847 +0000 UTC m=+152.913271851" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.902315 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tj4lm" podStartSLOduration=132.902277778 podStartE2EDuration="2m12.902277778s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:45.89921758 +0000 UTC m=+152.925585584" watchObservedRunningTime="2025-12-02 00:08:45.902277778 +0000 UTC m=+152.928645782" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.935433 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-4r66c" podStartSLOduration=7.93541578 podStartE2EDuration="7.93541578s" podCreationTimestamp="2025-12-02 00:08:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:45.934938868 +0000 UTC m=+152.961306872" watchObservedRunningTime="2025-12-02 00:08:45.93541578 +0000 UTC m=+152.961783784" Dec 02 00:08:45 crc kubenswrapper[4856]: I1202 00:08:45.971652 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:45 crc kubenswrapper[4856]: E1202 00:08:45.972010 4856 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:46.471984379 +0000 UTC m=+153.498352383 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.007777 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-49wjq" podStartSLOduration=133.007759908 podStartE2EDuration="2m13.007759908s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:46.007502042 +0000 UTC m=+153.033870116" watchObservedRunningTime="2025-12-02 00:08:46.007759908 +0000 UTC m=+153.034127912" Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.008244 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-7s95v" podStartSLOduration=133.008238181 podStartE2EDuration="2m13.008238181s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:45.972346708 +0000 UTC m=+152.998714712" watchObservedRunningTime="2025-12-02 00:08:46.008238181 +0000 UTC m=+153.034606185" Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.045186 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzp47" podStartSLOduration=133.045170589 podStartE2EDuration="2m13.045170589s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:46.044099112 +0000 UTC m=+153.070467106" watchObservedRunningTime="2025-12-02 00:08:46.045170589 +0000 UTC m=+153.071538593" Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.075058 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:46 crc kubenswrapper[4856]: E1202 00:08:46.075357 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:46.575342616 +0000 UTC m=+153.601710620 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.129617 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-t8qsp" podStartSLOduration=133.129602505 podStartE2EDuration="2m13.129602505s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:46.128487567 +0000 UTC m=+153.154855571" watchObservedRunningTime="2025-12-02 00:08:46.129602505 +0000 UTC m=+153.155970509" Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.177092 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:46 crc kubenswrapper[4856]: E1202 00:08:46.177545 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:46.677532863 +0000 UTC m=+153.703900867 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.277803 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:46 crc kubenswrapper[4856]: E1202 00:08:46.278162 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:46.77814742 +0000 UTC m=+153.804515424 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.379761 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:46 crc kubenswrapper[4856]: E1202 00:08:46.380138 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:46.880122401 +0000 UTC m=+153.906490395 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.481307 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:46 crc kubenswrapper[4856]: E1202 00:08:46.481534 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:46.981501198 +0000 UTC m=+154.007869192 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.481629 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:46 crc kubenswrapper[4856]: E1202 00:08:46.481962 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:46.981954469 +0000 UTC m=+154.008322473 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.582072 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:46 crc kubenswrapper[4856]: E1202 00:08:46.582384 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:47.082370231 +0000 UTC m=+154.108738235 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.671547 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" event={"ID":"53f12dd9-fe11-47df-9c75-d812a1a80309","Type":"ContainerStarted","Data":"0f42db06e8866b510378d433160948ccd8ad2dc66f043bb9b9f47567d284081d"} Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.673782 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2vztf" event={"ID":"8247592b-2d26-494b-a8ed-d4c1e052f7a6","Type":"ContainerStarted","Data":"ff6d9428b0679f16783d07392326117e088cc5a753c3f735d18f0de1640a1b55"} Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.674126 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2vztf" Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.675715 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-wlphq" event={"ID":"ffb4b3e1-a0b9-40fc-9b6f-f34549a866b3","Type":"ContainerStarted","Data":"2940f94a6ddcc4151a5deca7c5a8cc6ccd424dc0cf3f424eba62fc5ede2e7766"} Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.677844 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-sdthq" event={"ID":"cc0b2623-e7d2-4497-b688-977a23f8d922","Type":"ContainerStarted","Data":"b5f8198a2c50f83d43fdd2380dee1a75a50e11d42c9c331cb16def15e22f998c"} Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.678254 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-sdthq" Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.680130 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-xls64" event={"ID":"a753f707-5591-486d-b36c-217cdf0ae9df","Type":"ContainerStarted","Data":"f050136ae2755658338e34f40390b165d7e6276578b25ef068a71906909f7e1f"} Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.680153 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-xls64" event={"ID":"a753f707-5591-486d-b36c-217cdf0ae9df","Type":"ContainerStarted","Data":"9165c71b1257c0ac3ea5773503629cb7196c3b2ebc803842b4832090ffe3c8c5"} Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.681512 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2vc5h" event={"ID":"5fc26b3d-7f39-4821-a9f6-eafe39f3c335","Type":"ContainerStarted","Data":"21b862e9b40de9460570ebf77ac37cb24a840db3072ebe989a955cab8c5d3396"} Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.682945 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.683151 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-vstlv" event={"ID":"ecbd556c-e5af-4f34-8351-ef9ff3416abe","Type":"ContainerStarted","Data":"d513b5c5382949dcaa8fae984100c8a84ec265ca62ef5174e55ac1aa116efb21"} Dec 02 00:08:46 crc kubenswrapper[4856]: E1202 00:08:46.683320 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:47.183308997 +0000 UTC m=+154.209677001 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.685150 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-49wjq" event={"ID":"9c4c1d11-deb4-4971-bf3e-768b4b30ee6a","Type":"ContainerStarted","Data":"531b91090e221b35d42620c380431761e11bfe6ced932665fb4b5f068c329fd3"} Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.693986 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6phhd" event={"ID":"44d156b7-bec9-4cf2-8a71-91c94a4db280","Type":"ContainerStarted","Data":"f3ced93e7ceb0d5e414229b9f34ecb78459729c6456bc5eb76903e8aa8fa5536"} Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.695836 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410560-wj24r" event={"ID":"7accf6cf-1c4a-40d7-80d9-5b4f92a46d53","Type":"ContainerStarted","Data":"eb97cd567584d7df524f939c2f43c0878e2d74a85d1ff1511beb230bdddcd6cc"} Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.696701 4856 patch_prober.go:28] interesting pod/downloads-7954f5f757-89qzj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" start-of-body= Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.696758 4856 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-89qzj" podUID="7ecc82d8-17ea-4639-a4cf-2dea25574574" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.696788 4856 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-dllbd container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.40:8080/healthz\": dial tcp 10.217.0.40:8080: connect: connection refused" start-of-body= Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.696822 4856 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" podUID="7530965f-940a-4ac6-8dc6-be67d35a2f08" containerName="marketplace-operator" 
probeResult="failure" output="Get \"http://10.217.0.40:8080/healthz\": dial tcp 10.217.0.40:8080: connect: connection refused" Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.742330 4856 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.778501 4856 patch_prober.go:28] interesting pod/router-default-5444994796-5phk8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 00:08:46 crc kubenswrapper[4856]: [-]has-synced failed: reason withheld Dec 02 00:08:46 crc kubenswrapper[4856]: [+]process-running ok Dec 02 00:08:46 crc kubenswrapper[4856]: healthz check failed Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.778550 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5phk8" podUID="819ff29c-f7f5-442f-8eb5-e7ccc25d2219" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.784249 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:46 crc kubenswrapper[4856]: E1202 00:08:46.785175 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:47.285155175 +0000 UTC m=+154.311523179 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.785317 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:46 crc kubenswrapper[4856]: E1202 00:08:46.789335 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:47.289318731 +0000 UTC m=+154.315686735 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.839523 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" podStartSLOduration=133.839507126 podStartE2EDuration="2m13.839507126s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:46.802119606 +0000 UTC m=+153.828487610" watchObservedRunningTime="2025-12-02 00:08:46.839507126 +0000 UTC m=+153.865875130" Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.890958 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2vc5h" podStartSLOduration=133.890941423 podStartE2EDuration="2m13.890941423s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:46.848462414 +0000 UTC m=+153.874830418" watchObservedRunningTime="2025-12-02 00:08:46.890941423 +0000 UTC m=+153.917309427" Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.892231 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:46 crc kubenswrapper[4856]: E1202 00:08:46.892775 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:47.3927606 +0000 UTC m=+154.419128604 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.901003 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-xzzbh"] Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.904034 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xzzbh" Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.904996 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-vstlv" podStartSLOduration=133.90497739 podStartE2EDuration="2m13.90497739s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:46.896894985 +0000 UTC m=+153.923262989" watchObservedRunningTime="2025-12-02 00:08:46.90497739 +0000 UTC m=+153.931345394" Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.908559 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xzzbh"] Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.910038 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.952281 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2vztf" podStartSLOduration=133.952264142 podStartE2EDuration="2m13.952264142s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:46.949346108 +0000 UTC m=+153.975714112" watchObservedRunningTime="2025-12-02 00:08:46.952264142 +0000 UTC m=+153.978632146" Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.993244 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6efab283-f656-41a4-8996-4aee7986e931-utilities\") pod \"certified-operators-xzzbh\" (UID: \"6efab283-f656-41a4-8996-4aee7986e931\") " pod="openshift-marketplace/certified-operators-xzzbh" Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.993278 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6efab283-f656-41a4-8996-4aee7986e931-catalog-content\") pod \"certified-operators-xzzbh\" (UID: \"6efab283-f656-41a4-8996-4aee7986e931\") " pod="openshift-marketplace/certified-operators-xzzbh" Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.993295 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5l2km\" (UniqueName: \"kubernetes.io/projected/6efab283-f656-41a4-8996-4aee7986e931-kube-api-access-5l2km\") pod \"certified-operators-xzzbh\" (UID: \"6efab283-f656-41a4-8996-4aee7986e931\") " pod="openshift-marketplace/certified-operators-xzzbh" Dec 02 00:08:46 crc kubenswrapper[4856]: I1202 00:08:46.993328 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:46 crc kubenswrapper[4856]: E1202 00:08:46.993601 4856 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:47.493574292 +0000 UTC m=+154.519942296 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.017448 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-sdthq" podStartSLOduration=9.017427488 podStartE2EDuration="9.017427488s" podCreationTimestamp="2025-12-02 00:08:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:46.980872609 +0000 UTC m=+154.007240613" watchObservedRunningTime="2025-12-02 00:08:47.017427488 +0000 UTC m=+154.043795492" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.045481 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-j2276"] Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.046364 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-j2276" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.068007 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.072166 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-j2276"] Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.093987 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.094200 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6efab283-f656-41a4-8996-4aee7986e931-utilities\") pod \"certified-operators-xzzbh\" (UID: \"6efab283-f656-41a4-8996-4aee7986e931\") " pod="openshift-marketplace/certified-operators-xzzbh" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.094234 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6efab283-f656-41a4-8996-4aee7986e931-catalog-content\") pod \"certified-operators-xzzbh\" (UID: \"6efab283-f656-41a4-8996-4aee7986e931\") " pod="openshift-marketplace/certified-operators-xzzbh" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.094249 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5l2km\" (UniqueName: \"kubernetes.io/projected/6efab283-f656-41a4-8996-4aee7986e931-kube-api-access-5l2km\") pod \"certified-operators-xzzbh\" (UID: \"6efab283-f656-41a4-8996-4aee7986e931\") " 
pod="openshift-marketplace/certified-operators-xzzbh" Dec 02 00:08:47 crc kubenswrapper[4856]: E1202 00:08:47.094492 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:47.594466516 +0000 UTC m=+154.620834520 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.095040 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6efab283-f656-41a4-8996-4aee7986e931-utilities\") pod \"certified-operators-xzzbh\" (UID: \"6efab283-f656-41a4-8996-4aee7986e931\") " pod="openshift-marketplace/certified-operators-xzzbh" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.100227 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6efab283-f656-41a4-8996-4aee7986e931-catalog-content\") pod \"certified-operators-xzzbh\" (UID: \"6efab283-f656-41a4-8996-4aee7986e931\") " pod="openshift-marketplace/certified-operators-xzzbh" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.124479 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5l2km\" (UniqueName: \"kubernetes.io/projected/6efab283-f656-41a4-8996-4aee7986e931-kube-api-access-5l2km\") pod \"certified-operators-xzzbh\" (UID: \"6efab283-f656-41a4-8996-4aee7986e931\") " pod="openshift-marketplace/certified-operators-xzzbh" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.195682 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/884edeee-5df9-4820-be36-38b7095706ef-utilities\") pod \"community-operators-j2276\" (UID: \"884edeee-5df9-4820-be36-38b7095706ef\") " pod="openshift-marketplace/community-operators-j2276" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.196047 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q98rj\" (UniqueName: \"kubernetes.io/projected/884edeee-5df9-4820-be36-38b7095706ef-kube-api-access-q98rj\") pod \"community-operators-j2276\" (UID: \"884edeee-5df9-4820-be36-38b7095706ef\") " pod="openshift-marketplace/community-operators-j2276" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.196152 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.196274 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/884edeee-5df9-4820-be36-38b7095706ef-catalog-content\") pod \"community-operators-j2276\" (UID: \"884edeee-5df9-4820-be36-38b7095706ef\") " pod="openshift-marketplace/community-operators-j2276" Dec 02 00:08:47 crc kubenswrapper[4856]: E1202 00:08:47.196654 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:47.696639752 +0000 UTC m=+154.723007756 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.233720 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xzzbh" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.249315 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2mw6j"] Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.250498 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2mw6j" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.271534 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2mw6j"] Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.297441 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:47 crc kubenswrapper[4856]: E1202 00:08:47.297698 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 00:08:47.79767489 +0000 UTC m=+154.824042894 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.297912 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.297976 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/884edeee-5df9-4820-be36-38b7095706ef-catalog-content\") pod \"community-operators-j2276\" (UID: \"884edeee-5df9-4820-be36-38b7095706ef\") " pod="openshift-marketplace/community-operators-j2276" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.297999 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/884edeee-5df9-4820-be36-38b7095706ef-utilities\") pod \"community-operators-j2276\" (UID: \"884edeee-5df9-4820-be36-38b7095706ef\") " pod="openshift-marketplace/community-operators-j2276" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.298023 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q98rj\" (UniqueName: \"kubernetes.io/projected/884edeee-5df9-4820-be36-38b7095706ef-kube-api-access-q98rj\") pod \"community-operators-j2276\" (UID: \"884edeee-5df9-4820-be36-38b7095706ef\") " pod="openshift-marketplace/community-operators-j2276" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.298476 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/884edeee-5df9-4820-be36-38b7095706ef-catalog-content\") pod \"community-operators-j2276\" (UID: \"884edeee-5df9-4820-be36-38b7095706ef\") " pod="openshift-marketplace/community-operators-j2276" Dec 02 00:08:47 crc kubenswrapper[4856]: E1202 00:08:47.298517 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 00:08:47.798499001 +0000 UTC m=+154.824867005 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2d7ss" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.298577 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/884edeee-5df9-4820-be36-38b7095706ef-utilities\") pod \"community-operators-j2276\" (UID: \"884edeee-5df9-4820-be36-38b7095706ef\") " pod="openshift-marketplace/community-operators-j2276" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.340543 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q98rj\" (UniqueName: \"kubernetes.io/projected/884edeee-5df9-4820-be36-38b7095706ef-kube-api-access-q98rj\") pod \"community-operators-j2276\" (UID: \"884edeee-5df9-4820-be36-38b7095706ef\") " pod="openshift-marketplace/community-operators-j2276" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.385925 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-j2276" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.384800 4856 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-02T00:08:46.742360687Z","Handler":null,"Name":""} Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.389183 4856 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.389220 4856 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.411371 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.411571 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba515d9b-e262-408a-a28d-04c006e8a922-utilities\") pod \"certified-operators-2mw6j\" (UID: \"ba515d9b-e262-408a-a28d-04c006e8a922\") " pod="openshift-marketplace/certified-operators-2mw6j" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.411621 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba515d9b-e262-408a-a28d-04c006e8a922-catalog-content\") pod \"certified-operators-2mw6j\" (UID: \"ba515d9b-e262-408a-a28d-04c006e8a922\") " pod="openshift-marketplace/certified-operators-2mw6j" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.411719 4856 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l48ws\" (UniqueName: \"kubernetes.io/projected/ba515d9b-e262-408a-a28d-04c006e8a922-kube-api-access-l48ws\") pod \"certified-operators-2mw6j\" (UID: \"ba515d9b-e262-408a-a28d-04c006e8a922\") " pod="openshift-marketplace/certified-operators-2mw6j" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.467056 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-v5shq"] Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.468784 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v5shq" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.499731 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v5shq"] Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.503162 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.514986 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba515d9b-e262-408a-a28d-04c006e8a922-utilities\") pod \"certified-operators-2mw6j\" (UID: \"ba515d9b-e262-408a-a28d-04c006e8a922\") " pod="openshift-marketplace/certified-operators-2mw6j" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.515035 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba515d9b-e262-408a-a28d-04c006e8a922-catalog-content\") pod \"certified-operators-2mw6j\" (UID: \"ba515d9b-e262-408a-a28d-04c006e8a922\") " pod="openshift-marketplace/certified-operators-2mw6j" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.515077 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.515169 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l48ws\" (UniqueName: \"kubernetes.io/projected/ba515d9b-e262-408a-a28d-04c006e8a922-kube-api-access-l48ws\") pod \"certified-operators-2mw6j\" (UID: \"ba515d9b-e262-408a-a28d-04c006e8a922\") " pod="openshift-marketplace/certified-operators-2mw6j" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.519703 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba515d9b-e262-408a-a28d-04c006e8a922-utilities\") pod \"certified-operators-2mw6j\" (UID: \"ba515d9b-e262-408a-a28d-04c006e8a922\") " pod="openshift-marketplace/certified-operators-2mw6j" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.520239 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/ba515d9b-e262-408a-a28d-04c006e8a922-catalog-content\") pod \"certified-operators-2mw6j\" (UID: \"ba515d9b-e262-408a-a28d-04c006e8a922\") " pod="openshift-marketplace/certified-operators-2mw6j" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.537382 4856 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.537421 4856 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.558299 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l48ws\" (UniqueName: \"kubernetes.io/projected/ba515d9b-e262-408a-a28d-04c006e8a922-kube-api-access-l48ws\") pod \"certified-operators-2mw6j\" (UID: \"ba515d9b-e262-408a-a28d-04c006e8a922\") " pod="openshift-marketplace/certified-operators-2mw6j" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.596495 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2mw6j" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.625007 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6-utilities\") pod \"community-operators-v5shq\" (UID: \"b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6\") " pod="openshift-marketplace/community-operators-v5shq" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.625064 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbkbx\" (UniqueName: \"kubernetes.io/projected/b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6-kube-api-access-fbkbx\") pod \"community-operators-v5shq\" (UID: \"b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6\") " pod="openshift-marketplace/community-operators-v5shq" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.625096 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6-catalog-content\") pod \"community-operators-v5shq\" (UID: \"b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6\") " pod="openshift-marketplace/community-operators-v5shq" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.662669 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2d7ss\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.682256 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xzzbh"] Dec 02 00:08:47 crc kubenswrapper[4856]: 
I1202 00:08:47.724477 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-xls64" event={"ID":"a753f707-5591-486d-b36c-217cdf0ae9df","Type":"ContainerStarted","Data":"719864e30cd282e547c849f02fb3eeeb201bdfbd12e908d79fdf521debee57c4"} Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.724533 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-xls64" event={"ID":"a753f707-5591-486d-b36c-217cdf0ae9df","Type":"ContainerStarted","Data":"07d8a9c1568f6e9208cd3ea7dd101f9af76e3e103cf4acb21084942349b1415e"} Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.725739 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6-utilities\") pod \"community-operators-v5shq\" (UID: \"b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6\") " pod="openshift-marketplace/community-operators-v5shq" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.725791 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbkbx\" (UniqueName: \"kubernetes.io/projected/b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6-kube-api-access-fbkbx\") pod \"community-operators-v5shq\" (UID: \"b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6\") " pod="openshift-marketplace/community-operators-v5shq" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.725864 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6-catalog-content\") pod \"community-operators-v5shq\" (UID: \"b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6\") " pod="openshift-marketplace/community-operators-v5shq" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.726479 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6-catalog-content\") pod \"community-operators-v5shq\" (UID: \"b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6\") " pod="openshift-marketplace/community-operators-v5shq" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.726745 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6-utilities\") pod \"community-operators-v5shq\" (UID: \"b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6\") " pod="openshift-marketplace/community-operators-v5shq" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.732444 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xzzbh" event={"ID":"6efab283-f656-41a4-8996-4aee7986e931","Type":"ContainerStarted","Data":"001af3b1fecbf945210762b6da8d55c261e19db67559d7f8b07910c030e15cf7"} Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.736614 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2vc5h" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.740726 4856 patch_prober.go:28] interesting pod/downloads-7954f5f757-89qzj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" start-of-body= Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.741136 4856 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-console/downloads-7954f5f757-89qzj" podUID="7ecc82d8-17ea-4639-a4cf-2dea25574574" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.758015 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbkbx\" (UniqueName: \"kubernetes.io/projected/b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6-kube-api-access-fbkbx\") pod \"community-operators-v5shq\" (UID: \"b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6\") " pod="openshift-marketplace/community-operators-v5shq" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.760626 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-xls64" podStartSLOduration=9.760549554 podStartE2EDuration="9.760549554s" podCreationTimestamp="2025-12-02 00:08:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:47.758647495 +0000 UTC m=+154.785015499" watchObservedRunningTime="2025-12-02 00:08:47.760549554 +0000 UTC m=+154.786917558" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.768764 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-2vc5h" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.775626 4856 patch_prober.go:28] interesting pod/router-default-5444994796-5phk8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 00:08:47 crc kubenswrapper[4856]: [-]has-synced failed: reason withheld Dec 02 00:08:47 crc kubenswrapper[4856]: [+]process-running ok Dec 02 00:08:47 crc kubenswrapper[4856]: healthz check failed Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.775676 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5phk8" podUID="819ff29c-f7f5-442f-8eb5-e7ccc25d2219" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.847505 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v5shq" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.891854 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:47 crc kubenswrapper[4856]: I1202 00:08:47.914869 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-j2276"] Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.021217 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.021883 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.043306 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.043494 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.060384 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.147230 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ceb99e11-0f06-4b35-965e-1e595de116d8-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ceb99e11-0f06-4b35-965e-1e595de116d8\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.147273 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ceb99e11-0f06-4b35-965e-1e595de116d8-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ceb99e11-0f06-4b35-965e-1e595de116d8\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.221453 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2mw6j"] Dec 02 00:08:48 crc kubenswrapper[4856]: W1202 00:08:48.244578 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podba515d9b_e262_408a_a28d_04c006e8a922.slice/crio-d35540e9b44b26aa994d14b5992e6b7c81c986b50f074439055ec65e27390e6d WatchSource:0}: Error finding container d35540e9b44b26aa994d14b5992e6b7c81c986b50f074439055ec65e27390e6d: Status 404 returned error can't find the container with id d35540e9b44b26aa994d14b5992e6b7c81c986b50f074439055ec65e27390e6d Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.252865 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ceb99e11-0f06-4b35-965e-1e595de116d8-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ceb99e11-0f06-4b35-965e-1e595de116d8\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.252903 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ceb99e11-0f06-4b35-965e-1e595de116d8-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ceb99e11-0f06-4b35-965e-1e595de116d8\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.253258 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ceb99e11-0f06-4b35-965e-1e595de116d8-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ceb99e11-0f06-4b35-965e-1e595de116d8\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.285171 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/ceb99e11-0f06-4b35-965e-1e595de116d8-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ceb99e11-0f06-4b35-965e-1e595de116d8\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.412680 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.474991 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2d7ss"] Dec 02 00:08:48 crc kubenswrapper[4856]: W1202 00:08:48.533354 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeebf27f1_cb8d_4ec3_8982_afb66867cda1.slice/crio-e3e49a057036e0d634c1ec523485975d3576a57c2813185f10f75e43a44f085c WatchSource:0}: Error finding container e3e49a057036e0d634c1ec523485975d3576a57c2813185f10f75e43a44f085c: Status 404 returned error can't find the container with id e3e49a057036e0d634c1ec523485975d3576a57c2813185f10f75e43a44f085c Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.541908 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v5shq"] Dec 02 00:08:48 crc kubenswrapper[4856]: W1202 00:08:48.552824 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb47edae5_e6ef_4ac7_9dfd_9b8be6659aa6.slice/crio-18f929a68cf1ad9dfb5570205793c43029f98542cbd785dd4469d98409d854bc WatchSource:0}: Error finding container 18f929a68cf1ad9dfb5570205793c43029f98542cbd785dd4469d98409d854bc: Status 404 returned error can't find the container with id 18f929a68cf1ad9dfb5570205793c43029f98542cbd785dd4469d98409d854bc Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.627506 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 02 00:08:48 crc kubenswrapper[4856]: W1202 00:08:48.653899 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podceb99e11_0f06_4b35_965e_1e595de116d8.slice/crio-d978a0069ad16aa42d99a83a08528ec02e1c9340b9dd3949729d7dff50b1f409 WatchSource:0}: Error finding container d978a0069ad16aa42d99a83a08528ec02e1c9340b9dd3949729d7dff50b1f409: Status 404 returned error can't find the container with id d978a0069ad16aa42d99a83a08528ec02e1c9340b9dd3949729d7dff50b1f409 Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.749210 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" event={"ID":"eebf27f1-cb8d-4ec3-8982-afb66867cda1","Type":"ContainerStarted","Data":"066d7a58c9e8376a1bdf6804ef473dd7a724ed2a438ba91c63796e94e3702953"} Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.749261 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" event={"ID":"eebf27f1-cb8d-4ec3-8982-afb66867cda1","Type":"ContainerStarted","Data":"e3e49a057036e0d634c1ec523485975d3576a57c2813185f10f75e43a44f085c"} Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.749370 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.750979 4856 generic.go:334] "Generic (PLEG): container finished" 
podID="884edeee-5df9-4820-be36-38b7095706ef" containerID="28d9473a0ececf6e846e7428a9bb906ac221b663b077c276f33b137e266cde3c" exitCode=0 Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.751099 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j2276" event={"ID":"884edeee-5df9-4820-be36-38b7095706ef","Type":"ContainerDied","Data":"28d9473a0ececf6e846e7428a9bb906ac221b663b077c276f33b137e266cde3c"} Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.751122 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j2276" event={"ID":"884edeee-5df9-4820-be36-38b7095706ef","Type":"ContainerStarted","Data":"e6a985502718d50c06d3f686a9761ccd470842194856d767eaa3e1d662785bdf"} Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.752228 4856 generic.go:334] "Generic (PLEG): container finished" podID="ba515d9b-e262-408a-a28d-04c006e8a922" containerID="c8bba70ea2c36af4b06d8c8731803c83f2aa85ab72d537e77f7a124ca3cdaef9" exitCode=0 Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.752298 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2mw6j" event={"ID":"ba515d9b-e262-408a-a28d-04c006e8a922","Type":"ContainerDied","Data":"c8bba70ea2c36af4b06d8c8731803c83f2aa85ab72d537e77f7a124ca3cdaef9"} Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.752325 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2mw6j" event={"ID":"ba515d9b-e262-408a-a28d-04c006e8a922","Type":"ContainerStarted","Data":"d35540e9b44b26aa994d14b5992e6b7c81c986b50f074439055ec65e27390e6d"} Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.752921 4856 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.758348 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"ceb99e11-0f06-4b35-965e-1e595de116d8","Type":"ContainerStarted","Data":"d978a0069ad16aa42d99a83a08528ec02e1c9340b9dd3949729d7dff50b1f409"} Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.760787 4856 generic.go:334] "Generic (PLEG): container finished" podID="6efab283-f656-41a4-8996-4aee7986e931" containerID="ccb3ec777c0fbea5b9be4ff2176db0b67e048195e93bdc58394bdbdc81862287" exitCode=0 Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.760858 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xzzbh" event={"ID":"6efab283-f656-41a4-8996-4aee7986e931","Type":"ContainerDied","Data":"ccb3ec777c0fbea5b9be4ff2176db0b67e048195e93bdc58394bdbdc81862287"} Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.766483 4856 generic.go:334] "Generic (PLEG): container finished" podID="b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6" containerID="36afff96001e7bd909106a7fb2f68996ea6d06ce58ad77308b3852b4a28d5971" exitCode=0 Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.768082 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v5shq" event={"ID":"b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6","Type":"ContainerDied","Data":"36afff96001e7bd909106a7fb2f68996ea6d06ce58ad77308b3852b4a28d5971"} Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.768139 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v5shq" 
event={"ID":"b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6","Type":"ContainerStarted","Data":"18f929a68cf1ad9dfb5570205793c43029f98542cbd785dd4469d98409d854bc"} Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.771961 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" podStartSLOduration=135.771924356 podStartE2EDuration="2m15.771924356s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:08:48.767987645 +0000 UTC m=+155.794355659" watchObservedRunningTime="2025-12-02 00:08:48.771924356 +0000 UTC m=+155.798292360" Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.775094 4856 patch_prober.go:28] interesting pod/router-default-5444994796-5phk8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 00:08:48 crc kubenswrapper[4856]: [-]has-synced failed: reason withheld Dec 02 00:08:48 crc kubenswrapper[4856]: [+]process-running ok Dec 02 00:08:48 crc kubenswrapper[4856]: healthz check failed Dec 02 00:08:48 crc kubenswrapper[4856]: I1202 00:08:48.775147 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5phk8" podUID="819ff29c-f7f5-442f-8eb5-e7ccc25d2219" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.045726 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ckmlx"] Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.047280 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ckmlx" Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.048893 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.056396 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ckmlx"] Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.174754 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd-utilities\") pod \"redhat-marketplace-ckmlx\" (UID: \"fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd\") " pod="openshift-marketplace/redhat-marketplace-ckmlx" Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.175265 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2f6g2\" (UniqueName: \"kubernetes.io/projected/fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd-kube-api-access-2f6g2\") pod \"redhat-marketplace-ckmlx\" (UID: \"fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd\") " pod="openshift-marketplace/redhat-marketplace-ckmlx" Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.175539 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd-catalog-content\") pod \"redhat-marketplace-ckmlx\" (UID: \"fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd\") " pod="openshift-marketplace/redhat-marketplace-ckmlx" Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.259398 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.277034 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2f6g2\" (UniqueName: \"kubernetes.io/projected/fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd-kube-api-access-2f6g2\") pod \"redhat-marketplace-ckmlx\" (UID: \"fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd\") " pod="openshift-marketplace/redhat-marketplace-ckmlx" Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.277170 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd-catalog-content\") pod \"redhat-marketplace-ckmlx\" (UID: \"fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd\") " pod="openshift-marketplace/redhat-marketplace-ckmlx" Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.277211 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd-utilities\") pod \"redhat-marketplace-ckmlx\" (UID: \"fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd\") " pod="openshift-marketplace/redhat-marketplace-ckmlx" Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.277745 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd-catalog-content\") pod \"redhat-marketplace-ckmlx\" (UID: \"fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd\") " pod="openshift-marketplace/redhat-marketplace-ckmlx" Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 
00:08:49.277789 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd-utilities\") pod \"redhat-marketplace-ckmlx\" (UID: \"fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd\") " pod="openshift-marketplace/redhat-marketplace-ckmlx" Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.298460 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2f6g2\" (UniqueName: \"kubernetes.io/projected/fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd-kube-api-access-2f6g2\") pod \"redhat-marketplace-ckmlx\" (UID: \"fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd\") " pod="openshift-marketplace/redhat-marketplace-ckmlx" Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.370256 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ckmlx" Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.444684 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vhxwd"] Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.445613 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vhxwd" Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.468023 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vhxwd"] Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.580317 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7763d51e-74d4-4bb9-b956-e33a31753604-utilities\") pod \"redhat-marketplace-vhxwd\" (UID: \"7763d51e-74d4-4bb9-b956-e33a31753604\") " pod="openshift-marketplace/redhat-marketplace-vhxwd" Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.580468 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hklgb\" (UniqueName: \"kubernetes.io/projected/7763d51e-74d4-4bb9-b956-e33a31753604-kube-api-access-hklgb\") pod \"redhat-marketplace-vhxwd\" (UID: \"7763d51e-74d4-4bb9-b956-e33a31753604\") " pod="openshift-marketplace/redhat-marketplace-vhxwd" Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.580503 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7763d51e-74d4-4bb9-b956-e33a31753604-catalog-content\") pod \"redhat-marketplace-vhxwd\" (UID: \"7763d51e-74d4-4bb9-b956-e33a31753604\") " pod="openshift-marketplace/redhat-marketplace-vhxwd" Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.589628 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ckmlx"] Dec 02 00:08:49 crc kubenswrapper[4856]: W1202 00:08:49.615542 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfcc97516_b9c2_49cd_b9f5_c6cabfe4a3bd.slice/crio-7c72e0d7242a05bf64aabc7e14fecc92aead0cf3dd5d3dcd3e34a3e207a1b353 WatchSource:0}: Error finding container 7c72e0d7242a05bf64aabc7e14fecc92aead0cf3dd5d3dcd3e34a3e207a1b353: Status 404 returned error can't find the container with id 7c72e0d7242a05bf64aabc7e14fecc92aead0cf3dd5d3dcd3e34a3e207a1b353 Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.681998 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-hklgb\" (UniqueName: \"kubernetes.io/projected/7763d51e-74d4-4bb9-b956-e33a31753604-kube-api-access-hklgb\") pod \"redhat-marketplace-vhxwd\" (UID: \"7763d51e-74d4-4bb9-b956-e33a31753604\") " pod="openshift-marketplace/redhat-marketplace-vhxwd" Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.682052 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7763d51e-74d4-4bb9-b956-e33a31753604-catalog-content\") pod \"redhat-marketplace-vhxwd\" (UID: \"7763d51e-74d4-4bb9-b956-e33a31753604\") " pod="openshift-marketplace/redhat-marketplace-vhxwd" Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.682118 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7763d51e-74d4-4bb9-b956-e33a31753604-utilities\") pod \"redhat-marketplace-vhxwd\" (UID: \"7763d51e-74d4-4bb9-b956-e33a31753604\") " pod="openshift-marketplace/redhat-marketplace-vhxwd" Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.682554 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7763d51e-74d4-4bb9-b956-e33a31753604-utilities\") pod \"redhat-marketplace-vhxwd\" (UID: \"7763d51e-74d4-4bb9-b956-e33a31753604\") " pod="openshift-marketplace/redhat-marketplace-vhxwd" Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.682819 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7763d51e-74d4-4bb9-b956-e33a31753604-catalog-content\") pod \"redhat-marketplace-vhxwd\" (UID: \"7763d51e-74d4-4bb9-b956-e33a31753604\") " pod="openshift-marketplace/redhat-marketplace-vhxwd" Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.702396 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hklgb\" (UniqueName: \"kubernetes.io/projected/7763d51e-74d4-4bb9-b956-e33a31753604-kube-api-access-hklgb\") pod \"redhat-marketplace-vhxwd\" (UID: \"7763d51e-74d4-4bb9-b956-e33a31753604\") " pod="openshift-marketplace/redhat-marketplace-vhxwd" Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.774496 4856 patch_prober.go:28] interesting pod/router-default-5444994796-5phk8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 00:08:49 crc kubenswrapper[4856]: [-]has-synced failed: reason withheld Dec 02 00:08:49 crc kubenswrapper[4856]: [+]process-running ok Dec 02 00:08:49 crc kubenswrapper[4856]: healthz check failed Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.774552 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5phk8" podUID="819ff29c-f7f5-442f-8eb5-e7ccc25d2219" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.780710 4856 generic.go:334] "Generic (PLEG): container finished" podID="7accf6cf-1c4a-40d7-80d9-5b4f92a46d53" containerID="eb97cd567584d7df524f939c2f43c0878e2d74a85d1ff1511beb230bdddcd6cc" exitCode=0 Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.780775 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410560-wj24r" 
event={"ID":"7accf6cf-1c4a-40d7-80d9-5b4f92a46d53","Type":"ContainerDied","Data":"eb97cd567584d7df524f939c2f43c0878e2d74a85d1ff1511beb230bdddcd6cc"} Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.784834 4856 generic.go:334] "Generic (PLEG): container finished" podID="fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd" containerID="2c98168c88d70331f7635fae664e9e920694bed0fbeff98def733da40efa1420" exitCode=0 Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.784921 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ckmlx" event={"ID":"fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd","Type":"ContainerDied","Data":"2c98168c88d70331f7635fae664e9e920694bed0fbeff98def733da40efa1420"} Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.784943 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ckmlx" event={"ID":"fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd","Type":"ContainerStarted","Data":"7c72e0d7242a05bf64aabc7e14fecc92aead0cf3dd5d3dcd3e34a3e207a1b353"} Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.785333 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vhxwd" Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.786996 4856 generic.go:334] "Generic (PLEG): container finished" podID="ceb99e11-0f06-4b35-965e-1e595de116d8" containerID="56e55bfe7b871dca851803a0ca9bb60f98900ee345ef4ea56eeec0d4536c1af8" exitCode=0 Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.787049 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"ceb99e11-0f06-4b35-965e-1e595de116d8","Type":"ContainerDied","Data":"56e55bfe7b871dca851803a0ca9bb60f98900ee345ef4ea56eeec0d4536c1af8"} Dec 02 00:08:49 crc kubenswrapper[4856]: I1202 00:08:49.995992 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vhxwd"] Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.044818 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5r988"] Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.046036 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5r988" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.048835 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.059343 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5r988"] Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.191262 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1959db2-ac9e-4cfd-8afa-487e59d4177a-catalog-content\") pod \"redhat-operators-5r988\" (UID: \"a1959db2-ac9e-4cfd-8afa-487e59d4177a\") " pod="openshift-marketplace/redhat-operators-5r988" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.191409 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1959db2-ac9e-4cfd-8afa-487e59d4177a-utilities\") pod \"redhat-operators-5r988\" (UID: \"a1959db2-ac9e-4cfd-8afa-487e59d4177a\") " pod="openshift-marketplace/redhat-operators-5r988" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.191451 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmw47\" (UniqueName: \"kubernetes.io/projected/a1959db2-ac9e-4cfd-8afa-487e59d4177a-kube-api-access-lmw47\") pod \"redhat-operators-5r988\" (UID: \"a1959db2-ac9e-4cfd-8afa-487e59d4177a\") " pod="openshift-marketplace/redhat-operators-5r988" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.292898 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1959db2-ac9e-4cfd-8afa-487e59d4177a-catalog-content\") pod \"redhat-operators-5r988\" (UID: \"a1959db2-ac9e-4cfd-8afa-487e59d4177a\") " pod="openshift-marketplace/redhat-operators-5r988" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.293633 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1959db2-ac9e-4cfd-8afa-487e59d4177a-catalog-content\") pod \"redhat-operators-5r988\" (UID: \"a1959db2-ac9e-4cfd-8afa-487e59d4177a\") " pod="openshift-marketplace/redhat-operators-5r988" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.293911 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1959db2-ac9e-4cfd-8afa-487e59d4177a-utilities\") pod \"redhat-operators-5r988\" (UID: \"a1959db2-ac9e-4cfd-8afa-487e59d4177a\") " pod="openshift-marketplace/redhat-operators-5r988" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.293995 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmw47\" (UniqueName: \"kubernetes.io/projected/a1959db2-ac9e-4cfd-8afa-487e59d4177a-kube-api-access-lmw47\") pod \"redhat-operators-5r988\" (UID: \"a1959db2-ac9e-4cfd-8afa-487e59d4177a\") " pod="openshift-marketplace/redhat-operators-5r988" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.294300 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1959db2-ac9e-4cfd-8afa-487e59d4177a-utilities\") pod \"redhat-operators-5r988\" (UID: \"a1959db2-ac9e-4cfd-8afa-487e59d4177a\") " 
pod="openshift-marketplace/redhat-operators-5r988" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.313858 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmw47\" (UniqueName: \"kubernetes.io/projected/a1959db2-ac9e-4cfd-8afa-487e59d4177a-kube-api-access-lmw47\") pod \"redhat-operators-5r988\" (UID: \"a1959db2-ac9e-4cfd-8afa-487e59d4177a\") " pod="openshift-marketplace/redhat-operators-5r988" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.389419 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5r988" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.451281 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-s2slc"] Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.452765 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s2slc" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.459357 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s2slc"] Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.530239 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.535672 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-4mgld" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.602701 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4b04b8c-61cf-4bea-9eda-db2fc30e2247-catalog-content\") pod \"redhat-operators-s2slc\" (UID: \"f4b04b8c-61cf-4bea-9eda-db2fc30e2247\") " pod="openshift-marketplace/redhat-operators-s2slc" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.603011 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljwxv\" (UniqueName: \"kubernetes.io/projected/f4b04b8c-61cf-4bea-9eda-db2fc30e2247-kube-api-access-ljwxv\") pod \"redhat-operators-s2slc\" (UID: \"f4b04b8c-61cf-4bea-9eda-db2fc30e2247\") " pod="openshift-marketplace/redhat-operators-s2slc" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.603052 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4b04b8c-61cf-4bea-9eda-db2fc30e2247-utilities\") pod \"redhat-operators-s2slc\" (UID: \"f4b04b8c-61cf-4bea-9eda-db2fc30e2247\") " pod="openshift-marketplace/redhat-operators-s2slc" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.704872 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4b04b8c-61cf-4bea-9eda-db2fc30e2247-utilities\") pod \"redhat-operators-s2slc\" (UID: \"f4b04b8c-61cf-4bea-9eda-db2fc30e2247\") " pod="openshift-marketplace/redhat-operators-s2slc" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.705521 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4b04b8c-61cf-4bea-9eda-db2fc30e2247-catalog-content\") pod \"redhat-operators-s2slc\" (UID: \"f4b04b8c-61cf-4bea-9eda-db2fc30e2247\") " pod="openshift-marketplace/redhat-operators-s2slc" Dec 02 
00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.705599 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljwxv\" (UniqueName: \"kubernetes.io/projected/f4b04b8c-61cf-4bea-9eda-db2fc30e2247-kube-api-access-ljwxv\") pod \"redhat-operators-s2slc\" (UID: \"f4b04b8c-61cf-4bea-9eda-db2fc30e2247\") " pod="openshift-marketplace/redhat-operators-s2slc" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.706272 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4b04b8c-61cf-4bea-9eda-db2fc30e2247-utilities\") pod \"redhat-operators-s2slc\" (UID: \"f4b04b8c-61cf-4bea-9eda-db2fc30e2247\") " pod="openshift-marketplace/redhat-operators-s2slc" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.707174 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4b04b8c-61cf-4bea-9eda-db2fc30e2247-catalog-content\") pod \"redhat-operators-s2slc\" (UID: \"f4b04b8c-61cf-4bea-9eda-db2fc30e2247\") " pod="openshift-marketplace/redhat-operators-s2slc" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.710725 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5r988"] Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.740709 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljwxv\" (UniqueName: \"kubernetes.io/projected/f4b04b8c-61cf-4bea-9eda-db2fc30e2247-kube-api-access-ljwxv\") pod \"redhat-operators-s2slc\" (UID: \"f4b04b8c-61cf-4bea-9eda-db2fc30e2247\") " pod="openshift-marketplace/redhat-operators-s2slc" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.764328 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.764386 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.769146 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.770215 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-5phk8" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.776805 4856 patch_prober.go:28] interesting pod/router-default-5444994796-5phk8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 00:08:50 crc kubenswrapper[4856]: [-]has-synced failed: reason withheld Dec 02 00:08:50 crc kubenswrapper[4856]: [+]process-running ok Dec 02 00:08:50 crc kubenswrapper[4856]: healthz check failed Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.776875 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5phk8" podUID="819ff29c-f7f5-442f-8eb5-e7ccc25d2219" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.798822 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-s2slc" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.810732 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5r988" event={"ID":"a1959db2-ac9e-4cfd-8afa-487e59d4177a","Type":"ContainerStarted","Data":"5e4318ea2f524a6ae4504ed33c60e24617674495b7b2ed579d964e603abbd0b0"} Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.812899 4856 generic.go:334] "Generic (PLEG): container finished" podID="7763d51e-74d4-4bb9-b956-e33a31753604" containerID="dfc3741403b5d58289709fc58221116c2abbf3e0052be6d66eba58776f0a90b2" exitCode=0 Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.813542 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vhxwd" event={"ID":"7763d51e-74d4-4bb9-b956-e33a31753604","Type":"ContainerDied","Data":"dfc3741403b5d58289709fc58221116c2abbf3e0052be6d66eba58776f0a90b2"} Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.813569 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vhxwd" event={"ID":"7763d51e-74d4-4bb9-b956-e33a31753604","Type":"ContainerStarted","Data":"3905196c6ecd843c25af81afa2381776ba4920db128e69b5f639b71639406c87"} Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.826405 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7kxnr" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.946102 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.946154 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.956732 4856 patch_prober.go:28] interesting pod/console-f9d7485db-6zlxx container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.25:8443/health\": dial tcp 10.217.0.25:8443: connect: connection refused" start-of-body= Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.957160 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-6zlxx" podUID="23476e03-4f2c-426e-a180-9ad48a56d758" containerName="console" probeResult="failure" output="Get \"https://10.217.0.25:8443/health\": dial tcp 10.217.0.25:8443: connect: connection refused" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.957969 4856 patch_prober.go:28] interesting pod/downloads-7954f5f757-89qzj container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" start-of-body= Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.958000 4856 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-89qzj" podUID="7ecc82d8-17ea-4639-a4cf-2dea25574574" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.959473 4856 patch_prober.go:28] interesting pod/downloads-7954f5f757-89qzj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection 
refused" start-of-body= Dec 02 00:08:50 crc kubenswrapper[4856]: I1202 00:08:50.959506 4856 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-89qzj" podUID="7ecc82d8-17ea-4639-a4cf-2dea25574574" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.157929 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410560-wj24r" Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.208408 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.320772 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ceb99e11-0f06-4b35-965e-1e595de116d8-kube-api-access\") pod \"ceb99e11-0f06-4b35-965e-1e595de116d8\" (UID: \"ceb99e11-0f06-4b35-965e-1e595de116d8\") " Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.320841 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ceb99e11-0f06-4b35-965e-1e595de116d8-kubelet-dir\") pod \"ceb99e11-0f06-4b35-965e-1e595de116d8\" (UID: \"ceb99e11-0f06-4b35-965e-1e595de116d8\") " Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.320878 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7accf6cf-1c4a-40d7-80d9-5b4f92a46d53-config-volume\") pod \"7accf6cf-1c4a-40d7-80d9-5b4f92a46d53\" (UID: \"7accf6cf-1c4a-40d7-80d9-5b4f92a46d53\") " Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.320949 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ksb4r\" (UniqueName: \"kubernetes.io/projected/7accf6cf-1c4a-40d7-80d9-5b4f92a46d53-kube-api-access-ksb4r\") pod \"7accf6cf-1c4a-40d7-80d9-5b4f92a46d53\" (UID: \"7accf6cf-1c4a-40d7-80d9-5b4f92a46d53\") " Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.321018 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7accf6cf-1c4a-40d7-80d9-5b4f92a46d53-secret-volume\") pod \"7accf6cf-1c4a-40d7-80d9-5b4f92a46d53\" (UID: \"7accf6cf-1c4a-40d7-80d9-5b4f92a46d53\") " Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.322056 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ceb99e11-0f06-4b35-965e-1e595de116d8-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "ceb99e11-0f06-4b35-965e-1e595de116d8" (UID: "ceb99e11-0f06-4b35-965e-1e595de116d8"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.322701 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7accf6cf-1c4a-40d7-80d9-5b4f92a46d53-config-volume" (OuterVolumeSpecName: "config-volume") pod "7accf6cf-1c4a-40d7-80d9-5b4f92a46d53" (UID: "7accf6cf-1c4a-40d7-80d9-5b4f92a46d53"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.333104 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ceb99e11-0f06-4b35-965e-1e595de116d8-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "ceb99e11-0f06-4b35-965e-1e595de116d8" (UID: "ceb99e11-0f06-4b35-965e-1e595de116d8"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.333334 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7accf6cf-1c4a-40d7-80d9-5b4f92a46d53-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "7accf6cf-1c4a-40d7-80d9-5b4f92a46d53" (UID: "7accf6cf-1c4a-40d7-80d9-5b4f92a46d53"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.343617 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7accf6cf-1c4a-40d7-80d9-5b4f92a46d53-kube-api-access-ksb4r" (OuterVolumeSpecName: "kube-api-access-ksb4r") pod "7accf6cf-1c4a-40d7-80d9-5b4f92a46d53" (UID: "7accf6cf-1c4a-40d7-80d9-5b4f92a46d53"). InnerVolumeSpecName "kube-api-access-ksb4r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.386795 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s2slc"] Dec 02 00:08:51 crc kubenswrapper[4856]: W1202 00:08:51.419119 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b04b8c_61cf_4bea_9eda_db2fc30e2247.slice/crio-6feecb096eb5c4e27c373f62ec41bc741a2590edf07d4fd3e456c7cbc3275f59 WatchSource:0}: Error finding container 6feecb096eb5c4e27c373f62ec41bc741a2590edf07d4fd3e456c7cbc3275f59: Status 404 returned error can't find the container with id 6feecb096eb5c4e27c373f62ec41bc741a2590edf07d4fd3e456c7cbc3275f59 Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.422598 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ceb99e11-0f06-4b35-965e-1e595de116d8-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.422620 4856 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ceb99e11-0f06-4b35-965e-1e595de116d8-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.422632 4856 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7accf6cf-1c4a-40d7-80d9-5b4f92a46d53-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.422646 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ksb4r\" (UniqueName: \"kubernetes.io/projected/7accf6cf-1c4a-40d7-80d9-5b4f92a46d53-kube-api-access-ksb4r\") on node \"crc\" DevicePath \"\"" Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.422658 4856 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7accf6cf-1c4a-40d7-80d9-5b4f92a46d53-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.778801 4856 patch_prober.go:28] interesting 
pod/router-default-5444994796-5phk8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 00:08:51 crc kubenswrapper[4856]: [-]has-synced failed: reason withheld Dec 02 00:08:51 crc kubenswrapper[4856]: [+]process-running ok Dec 02 00:08:51 crc kubenswrapper[4856]: healthz check failed Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.779114 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5phk8" podUID="819ff29c-f7f5-442f-8eb5-e7ccc25d2219" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.799207 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.833755 4856 generic.go:334] "Generic (PLEG): container finished" podID="a1959db2-ac9e-4cfd-8afa-487e59d4177a" containerID="9afc5b5080253fbb7f37c141b754d9bdce9fa9f4fe49237e5f9232dc71c0d81c" exitCode=0 Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.833951 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5r988" event={"ID":"a1959db2-ac9e-4cfd-8afa-487e59d4177a","Type":"ContainerDied","Data":"9afc5b5080253fbb7f37c141b754d9bdce9fa9f4fe49237e5f9232dc71c0d81c"} Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.844979 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410560-wj24r" Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.844979 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410560-wj24r" event={"ID":"7accf6cf-1c4a-40d7-80d9-5b4f92a46d53","Type":"ContainerDied","Data":"d0f394787664799ef9d11ce36c295536299e694d38c2f46be3fbdbcee703c051"} Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.845088 4856 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d0f394787664799ef9d11ce36c295536299e694d38c2f46be3fbdbcee703c051" Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.850023 4856 generic.go:334] "Generic (PLEG): container finished" podID="f4b04b8c-61cf-4bea-9eda-db2fc30e2247" containerID="4b79351a7b6ae82c8381d9bc51fdbd614746961e8d356e76028e68570494f2ac" exitCode=0 Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.850087 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s2slc" event={"ID":"f4b04b8c-61cf-4bea-9eda-db2fc30e2247","Type":"ContainerDied","Data":"4b79351a7b6ae82c8381d9bc51fdbd614746961e8d356e76028e68570494f2ac"} Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.850113 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s2slc" event={"ID":"f4b04b8c-61cf-4bea-9eda-db2fc30e2247","Type":"ContainerStarted","Data":"6feecb096eb5c4e27c373f62ec41bc741a2590edf07d4fd3e456c7cbc3275f59"} Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.864258 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"ceb99e11-0f06-4b35-965e-1e595de116d8","Type":"ContainerDied","Data":"d978a0069ad16aa42d99a83a08528ec02e1c9340b9dd3949729d7dff50b1f409"} Dec 02 00:08:51 crc kubenswrapper[4856]: 
I1202 00:08:51.864296 4856 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d978a0069ad16aa42d99a83a08528ec02e1c9340b9dd3949729d7dff50b1f409" Dec 02 00:08:51 crc kubenswrapper[4856]: I1202 00:08:51.864320 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 00:08:52 crc kubenswrapper[4856]: I1202 00:08:52.627802 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 02 00:08:52 crc kubenswrapper[4856]: E1202 00:08:52.628159 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7accf6cf-1c4a-40d7-80d9-5b4f92a46d53" containerName="collect-profiles" Dec 02 00:08:52 crc kubenswrapper[4856]: I1202 00:08:52.628185 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="7accf6cf-1c4a-40d7-80d9-5b4f92a46d53" containerName="collect-profiles" Dec 02 00:08:52 crc kubenswrapper[4856]: E1202 00:08:52.628202 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ceb99e11-0f06-4b35-965e-1e595de116d8" containerName="pruner" Dec 02 00:08:52 crc kubenswrapper[4856]: I1202 00:08:52.628210 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="ceb99e11-0f06-4b35-965e-1e595de116d8" containerName="pruner" Dec 02 00:08:52 crc kubenswrapper[4856]: I1202 00:08:52.628336 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="7accf6cf-1c4a-40d7-80d9-5b4f92a46d53" containerName="collect-profiles" Dec 02 00:08:52 crc kubenswrapper[4856]: I1202 00:08:52.628356 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="ceb99e11-0f06-4b35-965e-1e595de116d8" containerName="pruner" Dec 02 00:08:52 crc kubenswrapper[4856]: I1202 00:08:52.630044 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 00:08:52 crc kubenswrapper[4856]: I1202 00:08:52.642505 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 02 00:08:52 crc kubenswrapper[4856]: I1202 00:08:52.646210 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 02 00:08:52 crc kubenswrapper[4856]: I1202 00:08:52.646422 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 02 00:08:52 crc kubenswrapper[4856]: I1202 00:08:52.742944 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/10b086f7-8e8e-4ebc-9036-941115221bde-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"10b086f7-8e8e-4ebc-9036-941115221bde\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 00:08:52 crc kubenswrapper[4856]: I1202 00:08:52.743105 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/10b086f7-8e8e-4ebc-9036-941115221bde-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"10b086f7-8e8e-4ebc-9036-941115221bde\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 00:08:52 crc kubenswrapper[4856]: I1202 00:08:52.779312 4856 patch_prober.go:28] interesting pod/router-default-5444994796-5phk8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 00:08:52 crc kubenswrapper[4856]: [-]has-synced failed: reason withheld Dec 02 00:08:52 crc kubenswrapper[4856]: [+]process-running ok Dec 02 00:08:52 crc kubenswrapper[4856]: healthz check failed Dec 02 00:08:52 crc kubenswrapper[4856]: I1202 00:08:52.779390 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5phk8" podUID="819ff29c-f7f5-442f-8eb5-e7ccc25d2219" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 00:08:52 crc kubenswrapper[4856]: I1202 00:08:52.844831 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/10b086f7-8e8e-4ebc-9036-941115221bde-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"10b086f7-8e8e-4ebc-9036-941115221bde\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 00:08:52 crc kubenswrapper[4856]: I1202 00:08:52.844942 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/10b086f7-8e8e-4ebc-9036-941115221bde-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"10b086f7-8e8e-4ebc-9036-941115221bde\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 00:08:52 crc kubenswrapper[4856]: I1202 00:08:52.845068 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/10b086f7-8e8e-4ebc-9036-941115221bde-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"10b086f7-8e8e-4ebc-9036-941115221bde\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 00:08:52 crc kubenswrapper[4856]: I1202 00:08:52.889509 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/10b086f7-8e8e-4ebc-9036-941115221bde-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"10b086f7-8e8e-4ebc-9036-941115221bde\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 00:08:52 crc kubenswrapper[4856]: I1202 00:08:52.968772 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 00:08:53 crc kubenswrapper[4856]: I1202 00:08:53.531203 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 02 00:08:53 crc kubenswrapper[4856]: W1202 00:08:53.568345 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod10b086f7_8e8e_4ebc_9036_941115221bde.slice/crio-2bd705509f1441a34bdc89496d7228ce60bbfb04d94f916f9e16870005af2d13 WatchSource:0}: Error finding container 2bd705509f1441a34bdc89496d7228ce60bbfb04d94f916f9e16870005af2d13: Status 404 returned error can't find the container with id 2bd705509f1441a34bdc89496d7228ce60bbfb04d94f916f9e16870005af2d13 Dec 02 00:08:53 crc kubenswrapper[4856]: I1202 00:08:53.774692 4856 patch_prober.go:28] interesting pod/router-default-5444994796-5phk8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 00:08:53 crc kubenswrapper[4856]: [-]has-synced failed: reason withheld Dec 02 00:08:53 crc kubenswrapper[4856]: [+]process-running ok Dec 02 00:08:53 crc kubenswrapper[4856]: healthz check failed Dec 02 00:08:53 crc kubenswrapper[4856]: I1202 00:08:53.774749 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5phk8" podUID="819ff29c-f7f5-442f-8eb5-e7ccc25d2219" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 00:08:53 crc kubenswrapper[4856]: I1202 00:08:53.899811 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"10b086f7-8e8e-4ebc-9036-941115221bde","Type":"ContainerStarted","Data":"2bd705509f1441a34bdc89496d7228ce60bbfb04d94f916f9e16870005af2d13"} Dec 02 00:08:54 crc kubenswrapper[4856]: I1202 00:08:54.774141 4856 patch_prober.go:28] interesting pod/router-default-5444994796-5phk8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 00:08:54 crc kubenswrapper[4856]: [-]has-synced failed: reason withheld Dec 02 00:08:54 crc kubenswrapper[4856]: [+]process-running ok Dec 02 00:08:54 crc kubenswrapper[4856]: healthz check failed Dec 02 00:08:54 crc kubenswrapper[4856]: I1202 00:08:54.774494 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5phk8" podUID="819ff29c-f7f5-442f-8eb5-e7ccc25d2219" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 00:08:54 crc kubenswrapper[4856]: I1202 00:08:54.907904 4856 generic.go:334] "Generic (PLEG): container finished" podID="10b086f7-8e8e-4ebc-9036-941115221bde" containerID="3dcca13c21f2df8819d0c3631fc2af776537e9740c85114b1d3918bd734cd5ed" exitCode=0 Dec 02 00:08:54 crc kubenswrapper[4856]: I1202 00:08:54.907945 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" 
event={"ID":"10b086f7-8e8e-4ebc-9036-941115221bde","Type":"ContainerDied","Data":"3dcca13c21f2df8819d0c3631fc2af776537e9740c85114b1d3918bd734cd5ed"} Dec 02 00:08:55 crc kubenswrapper[4856]: I1202 00:08:55.288396 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs\") pod \"network-metrics-daemon-4zvgr\" (UID: \"cbedea3d-dea3-407d-aae3-2ac725bcab34\") " pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:08:55 crc kubenswrapper[4856]: I1202 00:08:55.308002 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cbedea3d-dea3-407d-aae3-2ac725bcab34-metrics-certs\") pod \"network-metrics-daemon-4zvgr\" (UID: \"cbedea3d-dea3-407d-aae3-2ac725bcab34\") " pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:08:55 crc kubenswrapper[4856]: I1202 00:08:55.334548 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4zvgr" Dec 02 00:08:55 crc kubenswrapper[4856]: I1202 00:08:55.772953 4856 patch_prober.go:28] interesting pod/router-default-5444994796-5phk8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 00:08:55 crc kubenswrapper[4856]: [-]has-synced failed: reason withheld Dec 02 00:08:55 crc kubenswrapper[4856]: [+]process-running ok Dec 02 00:08:55 crc kubenswrapper[4856]: healthz check failed Dec 02 00:08:55 crc kubenswrapper[4856]: I1202 00:08:55.773025 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5phk8" podUID="819ff29c-f7f5-442f-8eb5-e7ccc25d2219" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 00:08:56 crc kubenswrapper[4856]: I1202 00:08:56.538857 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-sdthq" Dec 02 00:08:56 crc kubenswrapper[4856]: I1202 00:08:56.775491 4856 patch_prober.go:28] interesting pod/router-default-5444994796-5phk8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 00:08:56 crc kubenswrapper[4856]: [-]has-synced failed: reason withheld Dec 02 00:08:56 crc kubenswrapper[4856]: [+]process-running ok Dec 02 00:08:56 crc kubenswrapper[4856]: healthz check failed Dec 02 00:08:56 crc kubenswrapper[4856]: I1202 00:08:56.775542 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5phk8" podUID="819ff29c-f7f5-442f-8eb5-e7ccc25d2219" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 00:08:57 crc kubenswrapper[4856]: I1202 00:08:57.773472 4856 patch_prober.go:28] interesting pod/router-default-5444994796-5phk8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 00:08:57 crc kubenswrapper[4856]: [-]has-synced failed: reason withheld Dec 02 00:08:57 crc kubenswrapper[4856]: [+]process-running ok Dec 02 00:08:57 crc kubenswrapper[4856]: healthz check failed Dec 02 00:08:57 crc kubenswrapper[4856]: I1202 00:08:57.773983 4856 prober.go:107] 
"Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5phk8" podUID="819ff29c-f7f5-442f-8eb5-e7ccc25d2219" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 00:08:58 crc kubenswrapper[4856]: I1202 00:08:58.781652 4856 patch_prober.go:28] interesting pod/router-default-5444994796-5phk8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 00:08:58 crc kubenswrapper[4856]: [-]has-synced failed: reason withheld Dec 02 00:08:58 crc kubenswrapper[4856]: [+]process-running ok Dec 02 00:08:58 crc kubenswrapper[4856]: healthz check failed Dec 02 00:08:58 crc kubenswrapper[4856]: I1202 00:08:58.781725 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5phk8" podUID="819ff29c-f7f5-442f-8eb5-e7ccc25d2219" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 00:08:59 crc kubenswrapper[4856]: I1202 00:08:59.772712 4856 patch_prober.go:28] interesting pod/router-default-5444994796-5phk8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 00:08:59 crc kubenswrapper[4856]: [-]has-synced failed: reason withheld Dec 02 00:08:59 crc kubenswrapper[4856]: [+]process-running ok Dec 02 00:08:59 crc kubenswrapper[4856]: healthz check failed Dec 02 00:08:59 crc kubenswrapper[4856]: I1202 00:08:59.772787 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5phk8" podUID="819ff29c-f7f5-442f-8eb5-e7ccc25d2219" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 00:09:00 crc kubenswrapper[4856]: I1202 00:09:00.772315 4856 patch_prober.go:28] interesting pod/router-default-5444994796-5phk8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 00:09:00 crc kubenswrapper[4856]: [-]has-synced failed: reason withheld Dec 02 00:09:00 crc kubenswrapper[4856]: [+]process-running ok Dec 02 00:09:00 crc kubenswrapper[4856]: healthz check failed Dec 02 00:09:00 crc kubenswrapper[4856]: I1202 00:09:00.772381 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5phk8" podUID="819ff29c-f7f5-442f-8eb5-e7ccc25d2219" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 00:09:00 crc kubenswrapper[4856]: I1202 00:09:00.942755 4856 patch_prober.go:28] interesting pod/console-f9d7485db-6zlxx container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.25:8443/health\": dial tcp 10.217.0.25:8443: connect: connection refused" start-of-body= Dec 02 00:09:00 crc kubenswrapper[4856]: I1202 00:09:00.942817 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-6zlxx" podUID="23476e03-4f2c-426e-a180-9ad48a56d758" containerName="console" probeResult="failure" output="Get \"https://10.217.0.25:8443/health\": dial tcp 10.217.0.25:8443: connect: connection refused" Dec 02 00:09:00 crc kubenswrapper[4856]: I1202 00:09:00.977799 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-console/downloads-7954f5f757-89qzj" Dec 02 00:09:01 crc kubenswrapper[4856]: I1202 00:09:01.773091 4856 patch_prober.go:28] interesting pod/router-default-5444994796-5phk8 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 00:09:01 crc kubenswrapper[4856]: [-]has-synced failed: reason withheld Dec 02 00:09:01 crc kubenswrapper[4856]: [+]process-running ok Dec 02 00:09:01 crc kubenswrapper[4856]: healthz check failed Dec 02 00:09:01 crc kubenswrapper[4856]: I1202 00:09:01.773149 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5phk8" podUID="819ff29c-f7f5-442f-8eb5-e7ccc25d2219" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 00:09:02 crc kubenswrapper[4856]: I1202 00:09:02.395951 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 00:09:02 crc kubenswrapper[4856]: I1202 00:09:02.513616 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/10b086f7-8e8e-4ebc-9036-941115221bde-kubelet-dir\") pod \"10b086f7-8e8e-4ebc-9036-941115221bde\" (UID: \"10b086f7-8e8e-4ebc-9036-941115221bde\") " Dec 02 00:09:02 crc kubenswrapper[4856]: I1202 00:09:02.513684 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/10b086f7-8e8e-4ebc-9036-941115221bde-kube-api-access\") pod \"10b086f7-8e8e-4ebc-9036-941115221bde\" (UID: \"10b086f7-8e8e-4ebc-9036-941115221bde\") " Dec 02 00:09:02 crc kubenswrapper[4856]: I1202 00:09:02.513820 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/10b086f7-8e8e-4ebc-9036-941115221bde-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "10b086f7-8e8e-4ebc-9036-941115221bde" (UID: "10b086f7-8e8e-4ebc-9036-941115221bde"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:09:02 crc kubenswrapper[4856]: I1202 00:09:02.514049 4856 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/10b086f7-8e8e-4ebc-9036-941115221bde-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 02 00:09:02 crc kubenswrapper[4856]: I1202 00:09:02.531928 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10b086f7-8e8e-4ebc-9036-941115221bde-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "10b086f7-8e8e-4ebc-9036-941115221bde" (UID: "10b086f7-8e8e-4ebc-9036-941115221bde"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:09:02 crc kubenswrapper[4856]: I1202 00:09:02.615386 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/10b086f7-8e8e-4ebc-9036-941115221bde-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 00:09:02 crc kubenswrapper[4856]: I1202 00:09:02.780140 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-5phk8" Dec 02 00:09:02 crc kubenswrapper[4856]: I1202 00:09:02.783783 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-5phk8" Dec 02 00:09:02 crc kubenswrapper[4856]: I1202 00:09:02.983835 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"10b086f7-8e8e-4ebc-9036-941115221bde","Type":"ContainerDied","Data":"2bd705509f1441a34bdc89496d7228ce60bbfb04d94f916f9e16870005af2d13"} Dec 02 00:09:02 crc kubenswrapper[4856]: I1202 00:09:02.983885 4856 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2bd705509f1441a34bdc89496d7228ce60bbfb04d94f916f9e16870005af2d13" Dec 02 00:09:02 crc kubenswrapper[4856]: I1202 00:09:02.983891 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 00:09:05 crc kubenswrapper[4856]: I1202 00:09:05.061800 4856 patch_prober.go:28] interesting pod/machine-config-daemon-455ww container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 00:09:05 crc kubenswrapper[4856]: I1202 00:09:05.062065 4856 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 00:09:07 crc kubenswrapper[4856]: I1202 00:09:07.897117 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:09:10 crc kubenswrapper[4856]: I1202 00:09:10.326894 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 00:09:10 crc kubenswrapper[4856]: I1202 00:09:10.947244 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:09:10 crc kubenswrapper[4856]: I1202 00:09:10.954373 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-6zlxx" Dec 02 00:09:15 crc kubenswrapper[4856]: I1202 00:09:15.049467 4856 generic.go:334] "Generic (PLEG): container finished" podID="ab1f3930-5ec8-49ad-844b-a6166d3ec3fb" containerID="b76d19ac2e0cc065d560c2b6ed7708545d8363be7b79d92cc46817619a731ad9" exitCode=0 Dec 02 00:09:15 crc kubenswrapper[4856]: I1202 00:09:15.049554 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29410560-9nw7n" event={"ID":"ab1f3930-5ec8-49ad-844b-a6166d3ec3fb","Type":"ContainerDied","Data":"b76d19ac2e0cc065d560c2b6ed7708545d8363be7b79d92cc46817619a731ad9"} Dec 02 
00:09:21 crc kubenswrapper[4856]: I1202 00:09:21.428570 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2vztf" Dec 02 00:09:22 crc kubenswrapper[4856]: E1202 00:09:22.058714 4856 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 02 00:09:22 crc kubenswrapper[4856]: E1202 00:09:22.059311 4856 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5l2km,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-xzzbh_openshift-marketplace(6efab283-f656-41a4-8996-4aee7986e931): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 00:09:22 crc kubenswrapper[4856]: E1202 00:09:22.060663 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-xzzbh" podUID="6efab283-f656-41a4-8996-4aee7986e931" Dec 02 00:09:22 crc kubenswrapper[4856]: E1202 00:09:22.947273 4856 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 02 00:09:22 crc kubenswrapper[4856]: E1202 00:09:22.947412 4856 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2f6g2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-ckmlx_openshift-marketplace(fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 00:09:22 crc kubenswrapper[4856]: E1202 00:09:22.948622 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-ckmlx" podUID="fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd" Dec 02 00:09:24 crc kubenswrapper[4856]: E1202 00:09:24.780536 4856 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 02 00:09:24 crc kubenswrapper[4856]: E1202 00:09:24.780740 4856 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-l48ws,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-2mw6j_openshift-marketplace(ba515d9b-e262-408a-a28d-04c006e8a922): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 00:09:24 crc kubenswrapper[4856]: E1202 00:09:24.781933 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-2mw6j" podUID="ba515d9b-e262-408a-a28d-04c006e8a922" Dec 02 00:09:24 crc kubenswrapper[4856]: E1202 00:09:24.835902 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-ckmlx" podUID="fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd" Dec 02 00:09:24 crc kubenswrapper[4856]: E1202 00:09:24.835984 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-xzzbh" podUID="6efab283-f656-41a4-8996-4aee7986e931" Dec 02 00:09:24 crc kubenswrapper[4856]: I1202 00:09:24.883826 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-pruner-29410560-9nw7n" Dec 02 00:09:25 crc kubenswrapper[4856]: I1202 00:09:25.029463 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/ab1f3930-5ec8-49ad-844b-a6166d3ec3fb-serviceca\") pod \"ab1f3930-5ec8-49ad-844b-a6166d3ec3fb\" (UID: \"ab1f3930-5ec8-49ad-844b-a6166d3ec3fb\") " Dec 02 00:09:25 crc kubenswrapper[4856]: I1202 00:09:25.029659 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-skfcm\" (UniqueName: \"kubernetes.io/projected/ab1f3930-5ec8-49ad-844b-a6166d3ec3fb-kube-api-access-skfcm\") pod \"ab1f3930-5ec8-49ad-844b-a6166d3ec3fb\" (UID: \"ab1f3930-5ec8-49ad-844b-a6166d3ec3fb\") " Dec 02 00:09:25 crc kubenswrapper[4856]: I1202 00:09:25.030223 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab1f3930-5ec8-49ad-844b-a6166d3ec3fb-serviceca" (OuterVolumeSpecName: "serviceca") pod "ab1f3930-5ec8-49ad-844b-a6166d3ec3fb" (UID: "ab1f3930-5ec8-49ad-844b-a6166d3ec3fb"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:09:25 crc kubenswrapper[4856]: I1202 00:09:25.034321 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab1f3930-5ec8-49ad-844b-a6166d3ec3fb-kube-api-access-skfcm" (OuterVolumeSpecName: "kube-api-access-skfcm") pod "ab1f3930-5ec8-49ad-844b-a6166d3ec3fb" (UID: "ab1f3930-5ec8-49ad-844b-a6166d3ec3fb"). InnerVolumeSpecName "kube-api-access-skfcm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:09:25 crc kubenswrapper[4856]: I1202 00:09:25.106193 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29410560-9nw7n" event={"ID":"ab1f3930-5ec8-49ad-844b-a6166d3ec3fb","Type":"ContainerDied","Data":"921f02f31aa48256a1d45581e3b6fd0eeec92d3b6ad02cfe1a726a9d9b251c49"} Dec 02 00:09:25 crc kubenswrapper[4856]: I1202 00:09:25.106237 4856 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="921f02f31aa48256a1d45581e3b6fd0eeec92d3b6ad02cfe1a726a9d9b251c49" Dec 02 00:09:25 crc kubenswrapper[4856]: I1202 00:09:25.106218 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-pruner-29410560-9nw7n" Dec 02 00:09:25 crc kubenswrapper[4856]: I1202 00:09:25.133192 4856 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/ab1f3930-5ec8-49ad-844b-a6166d3ec3fb-serviceca\") on node \"crc\" DevicePath \"\"" Dec 02 00:09:25 crc kubenswrapper[4856]: I1202 00:09:25.133247 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-skfcm\" (UniqueName: \"kubernetes.io/projected/ab1f3930-5ec8-49ad-844b-a6166d3ec3fb-kube-api-access-skfcm\") on node \"crc\" DevicePath \"\"" Dec 02 00:09:25 crc kubenswrapper[4856]: I1202 00:09:25.222099 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-4zvgr"] Dec 02 00:09:25 crc kubenswrapper[4856]: E1202 00:09:25.530317 4856 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 02 00:09:25 crc kubenswrapper[4856]: E1202 00:09:25.530789 4856 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fbkbx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-v5shq_openshift-marketplace(b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 00:09:25 crc kubenswrapper[4856]: E1202 00:09:25.532223 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-v5shq" podUID="b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6" Dec 02 00:09:25 crc kubenswrapper[4856]: E1202 00:09:25.557139 4856 log.go:32] "PullImage from image service failed" err="rpc error: 
code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 02 00:09:25 crc kubenswrapper[4856]: E1202 00:09:25.557430 4856 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hklgb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-vhxwd_openshift-marketplace(7763d51e-74d4-4bb9-b956-e33a31753604): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 00:09:25 crc kubenswrapper[4856]: E1202 00:09:25.558681 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-vhxwd" podUID="7763d51e-74d4-4bb9-b956-e33a31753604" Dec 02 00:09:25 crc kubenswrapper[4856]: E1202 00:09:25.635812 4856 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 02 00:09:25 crc kubenswrapper[4856]: E1202 00:09:25.635988 4856 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-q98rj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-j2276_openshift-marketplace(884edeee-5df9-4820-be36-38b7095706ef): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 00:09:25 crc kubenswrapper[4856]: E1202 00:09:25.637497 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-j2276" podUID="884edeee-5df9-4820-be36-38b7095706ef" Dec 02 00:09:26 crc kubenswrapper[4856]: I1202 00:09:26.008268 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 02 00:09:26 crc kubenswrapper[4856]: E1202 00:09:26.008802 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab1f3930-5ec8-49ad-844b-a6166d3ec3fb" containerName="image-pruner" Dec 02 00:09:26 crc kubenswrapper[4856]: I1202 00:09:26.008815 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab1f3930-5ec8-49ad-844b-a6166d3ec3fb" containerName="image-pruner" Dec 02 00:09:26 crc kubenswrapper[4856]: E1202 00:09:26.008827 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10b086f7-8e8e-4ebc-9036-941115221bde" containerName="pruner" Dec 02 00:09:26 crc kubenswrapper[4856]: I1202 00:09:26.008833 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="10b086f7-8e8e-4ebc-9036-941115221bde" containerName="pruner" Dec 02 00:09:26 crc kubenswrapper[4856]: I1202 00:09:26.008917 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="10b086f7-8e8e-4ebc-9036-941115221bde" containerName="pruner" Dec 02 00:09:26 crc kubenswrapper[4856]: I1202 00:09:26.008928 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab1f3930-5ec8-49ad-844b-a6166d3ec3fb" containerName="image-pruner" Dec 02 00:09:26 crc kubenswrapper[4856]: I1202 00:09:26.009258 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 00:09:26 crc kubenswrapper[4856]: I1202 00:09:26.011943 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 02 00:09:26 crc kubenswrapper[4856]: I1202 00:09:26.012621 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 02 00:09:26 crc kubenswrapper[4856]: I1202 00:09:26.017659 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 02 00:09:26 crc kubenswrapper[4856]: I1202 00:09:26.146821 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2eabd796-ab20-4aa0-b27f-1196c08d4c7b-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2eabd796-ab20-4aa0-b27f-1196c08d4c7b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 00:09:26 crc kubenswrapper[4856]: I1202 00:09:26.146871 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2eabd796-ab20-4aa0-b27f-1196c08d4c7b-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2eabd796-ab20-4aa0-b27f-1196c08d4c7b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 00:09:26 crc kubenswrapper[4856]: I1202 00:09:26.248087 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2eabd796-ab20-4aa0-b27f-1196c08d4c7b-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2eabd796-ab20-4aa0-b27f-1196c08d4c7b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 00:09:26 crc kubenswrapper[4856]: I1202 00:09:26.248168 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2eabd796-ab20-4aa0-b27f-1196c08d4c7b-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2eabd796-ab20-4aa0-b27f-1196c08d4c7b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 00:09:26 crc kubenswrapper[4856]: I1202 00:09:26.248292 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2eabd796-ab20-4aa0-b27f-1196c08d4c7b-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2eabd796-ab20-4aa0-b27f-1196c08d4c7b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 00:09:26 crc kubenswrapper[4856]: I1202 00:09:26.268920 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2eabd796-ab20-4aa0-b27f-1196c08d4c7b-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2eabd796-ab20-4aa0-b27f-1196c08d4c7b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 00:09:26 crc kubenswrapper[4856]: I1202 00:09:26.386925 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 00:09:28 crc kubenswrapper[4856]: E1202 00:09:28.626914 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-vhxwd" podUID="7763d51e-74d4-4bb9-b956-e33a31753604" Dec 02 00:09:28 crc kubenswrapper[4856]: E1202 00:09:28.626950 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-j2276" podUID="884edeee-5df9-4820-be36-38b7095706ef" Dec 02 00:09:28 crc kubenswrapper[4856]: E1202 00:09:28.626975 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-v5shq" podUID="b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6" Dec 02 00:09:28 crc kubenswrapper[4856]: E1202 00:09:28.627106 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-2mw6j" podUID="ba515d9b-e262-408a-a28d-04c006e8a922" Dec 02 00:09:28 crc kubenswrapper[4856]: W1202 00:09:28.632046 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcbedea3d_dea3_407d_aae3_2ac725bcab34.slice/crio-399c7034a029a7b7e52f60c6f2e80a1b7ddbb1ac9b209a1182003ac3432c33a6 WatchSource:0}: Error finding container 399c7034a029a7b7e52f60c6f2e80a1b7ddbb1ac9b209a1182003ac3432c33a6: Status 404 returned error can't find the container with id 399c7034a029a7b7e52f60c6f2e80a1b7ddbb1ac9b209a1182003ac3432c33a6 Dec 02 00:09:28 crc kubenswrapper[4856]: E1202 00:09:28.662262 4856 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 02 00:09:28 crc kubenswrapper[4856]: E1202 00:09:28.662713 4856 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ljwxv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-s2slc_openshift-marketplace(f4b04b8c-61cf-4bea-9eda-db2fc30e2247): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 00:09:28 crc kubenswrapper[4856]: E1202 00:09:28.663942 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-s2slc" podUID="f4b04b8c-61cf-4bea-9eda-db2fc30e2247" Dec 02 00:09:29 crc kubenswrapper[4856]: I1202 00:09:29.013335 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 02 00:09:29 crc kubenswrapper[4856]: I1202 00:09:29.127746 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-4zvgr" event={"ID":"cbedea3d-dea3-407d-aae3-2ac725bcab34","Type":"ContainerStarted","Data":"a5480512c7f84619cac00e26fe668e5af38bd7168b64fcb41742ce19daf48215"} Dec 02 00:09:29 crc kubenswrapper[4856]: I1202 00:09:29.128049 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-4zvgr" event={"ID":"cbedea3d-dea3-407d-aae3-2ac725bcab34","Type":"ContainerStarted","Data":"399c7034a029a7b7e52f60c6f2e80a1b7ddbb1ac9b209a1182003ac3432c33a6"} Dec 02 00:09:29 crc kubenswrapper[4856]: I1202 00:09:29.129896 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5r988" event={"ID":"a1959db2-ac9e-4cfd-8afa-487e59d4177a","Type":"ContainerStarted","Data":"91cb2d7c3939a3ea578c06d095eefecdd61b4686b2b37939c12f7cdd1fb89dc4"} Dec 02 00:09:29 crc kubenswrapper[4856]: I1202 00:09:29.132440 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"2eabd796-ab20-4aa0-b27f-1196c08d4c7b","Type":"ContainerStarted","Data":"b6a50a7aeba2dfe6473051b0586a702a1f0af9d85aee1dd54e7a8cec9a7ac3bf"} Dec 02 00:09:29 crc kubenswrapper[4856]: E1202 00:09:29.137277 4856 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-s2slc" podUID="f4b04b8c-61cf-4bea-9eda-db2fc30e2247" Dec 02 00:09:30 crc kubenswrapper[4856]: I1202 00:09:30.142231 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-4zvgr" event={"ID":"cbedea3d-dea3-407d-aae3-2ac725bcab34","Type":"ContainerStarted","Data":"9369ae97017a0ff79dcd3891eca51b08c2ff7cce99f65a7285a64cf5f396ee2f"} Dec 02 00:09:30 crc kubenswrapper[4856]: I1202 00:09:30.145640 4856 generic.go:334] "Generic (PLEG): container finished" podID="a1959db2-ac9e-4cfd-8afa-487e59d4177a" containerID="91cb2d7c3939a3ea578c06d095eefecdd61b4686b2b37939c12f7cdd1fb89dc4" exitCode=0 Dec 02 00:09:30 crc kubenswrapper[4856]: I1202 00:09:30.145724 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5r988" event={"ID":"a1959db2-ac9e-4cfd-8afa-487e59d4177a","Type":"ContainerDied","Data":"91cb2d7c3939a3ea578c06d095eefecdd61b4686b2b37939c12f7cdd1fb89dc4"} Dec 02 00:09:30 crc kubenswrapper[4856]: I1202 00:09:30.149722 4856 generic.go:334] "Generic (PLEG): container finished" podID="2eabd796-ab20-4aa0-b27f-1196c08d4c7b" containerID="2e6db8ce32cf4c6937ed27f699ab925dea01b4afb6c9f191837582674c5b7ab7" exitCode=0 Dec 02 00:09:30 crc kubenswrapper[4856]: I1202 00:09:30.149779 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"2eabd796-ab20-4aa0-b27f-1196c08d4c7b","Type":"ContainerDied","Data":"2e6db8ce32cf4c6937ed27f699ab925dea01b4afb6c9f191837582674c5b7ab7"} Dec 02 00:09:30 crc kubenswrapper[4856]: I1202 00:09:30.180823 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-4zvgr" podStartSLOduration=177.180797749 podStartE2EDuration="2m57.180797749s" podCreationTimestamp="2025-12-02 00:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:09:30.170686402 +0000 UTC m=+197.197054486" watchObservedRunningTime="2025-12-02 00:09:30.180797749 +0000 UTC m=+197.207165783" Dec 02 00:09:31 crc kubenswrapper[4856]: I1202 00:09:31.360776 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 00:09:31 crc kubenswrapper[4856]: I1202 00:09:31.524142 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2eabd796-ab20-4aa0-b27f-1196c08d4c7b-kubelet-dir\") pod \"2eabd796-ab20-4aa0-b27f-1196c08d4c7b\" (UID: \"2eabd796-ab20-4aa0-b27f-1196c08d4c7b\") " Dec 02 00:09:31 crc kubenswrapper[4856]: I1202 00:09:31.524393 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2eabd796-ab20-4aa0-b27f-1196c08d4c7b-kube-api-access\") pod \"2eabd796-ab20-4aa0-b27f-1196c08d4c7b\" (UID: \"2eabd796-ab20-4aa0-b27f-1196c08d4c7b\") " Dec 02 00:09:31 crc kubenswrapper[4856]: I1202 00:09:31.524251 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2eabd796-ab20-4aa0-b27f-1196c08d4c7b-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "2eabd796-ab20-4aa0-b27f-1196c08d4c7b" (UID: "2eabd796-ab20-4aa0-b27f-1196c08d4c7b"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:09:31 crc kubenswrapper[4856]: I1202 00:09:31.531832 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2eabd796-ab20-4aa0-b27f-1196c08d4c7b-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "2eabd796-ab20-4aa0-b27f-1196c08d4c7b" (UID: "2eabd796-ab20-4aa0-b27f-1196c08d4c7b"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:09:31 crc kubenswrapper[4856]: I1202 00:09:31.635188 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2eabd796-ab20-4aa0-b27f-1196c08d4c7b-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 00:09:31 crc kubenswrapper[4856]: I1202 00:09:31.635257 4856 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2eabd796-ab20-4aa0-b27f-1196c08d4c7b-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 02 00:09:32 crc kubenswrapper[4856]: I1202 00:09:32.163267 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 00:09:32 crc kubenswrapper[4856]: I1202 00:09:32.163280 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"2eabd796-ab20-4aa0-b27f-1196c08d4c7b","Type":"ContainerDied","Data":"b6a50a7aeba2dfe6473051b0586a702a1f0af9d85aee1dd54e7a8cec9a7ac3bf"} Dec 02 00:09:32 crc kubenswrapper[4856]: I1202 00:09:32.163977 4856 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b6a50a7aeba2dfe6473051b0586a702a1f0af9d85aee1dd54e7a8cec9a7ac3bf" Dec 02 00:09:32 crc kubenswrapper[4856]: I1202 00:09:32.166263 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5r988" event={"ID":"a1959db2-ac9e-4cfd-8afa-487e59d4177a","Type":"ContainerStarted","Data":"db6d48853372532e4db9a6df4614e78907a8c91d8572f2507db9e76bfb4111f0"} Dec 02 00:09:32 crc kubenswrapper[4856]: I1202 00:09:32.190733 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5r988" podStartSLOduration=2.574610218 podStartE2EDuration="42.190714322s" podCreationTimestamp="2025-12-02 00:08:50 +0000 UTC" firstStartedPulling="2025-12-02 00:08:51.836412106 +0000 UTC m=+158.862780110" lastFinishedPulling="2025-12-02 00:09:31.45251619 +0000 UTC m=+198.478884214" observedRunningTime="2025-12-02 00:09:32.190594969 +0000 UTC m=+199.216963013" watchObservedRunningTime="2025-12-02 00:09:32.190714322 +0000 UTC m=+199.217082326" Dec 02 00:09:32 crc kubenswrapper[4856]: I1202 00:09:32.402405 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 02 00:09:32 crc kubenswrapper[4856]: E1202 00:09:32.402649 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2eabd796-ab20-4aa0-b27f-1196c08d4c7b" containerName="pruner" Dec 02 00:09:32 crc kubenswrapper[4856]: I1202 00:09:32.402661 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="2eabd796-ab20-4aa0-b27f-1196c08d4c7b" containerName="pruner" Dec 02 00:09:32 crc kubenswrapper[4856]: I1202 00:09:32.402755 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="2eabd796-ab20-4aa0-b27f-1196c08d4c7b" containerName="pruner" Dec 02 00:09:32 crc kubenswrapper[4856]: I1202 00:09:32.403130 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 02 00:09:32 crc kubenswrapper[4856]: I1202 00:09:32.408982 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 02 00:09:32 crc kubenswrapper[4856]: I1202 00:09:32.409245 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 02 00:09:32 crc kubenswrapper[4856]: I1202 00:09:32.413242 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 02 00:09:32 crc kubenswrapper[4856]: I1202 00:09:32.546289 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/77c28b4e-13c3-403e-8408-fd51e65a1f58-kubelet-dir\") pod \"installer-9-crc\" (UID: \"77c28b4e-13c3-403e-8408-fd51e65a1f58\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 00:09:32 crc kubenswrapper[4856]: I1202 00:09:32.546349 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/77c28b4e-13c3-403e-8408-fd51e65a1f58-var-lock\") pod \"installer-9-crc\" (UID: \"77c28b4e-13c3-403e-8408-fd51e65a1f58\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 00:09:32 crc kubenswrapper[4856]: I1202 00:09:32.546457 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/77c28b4e-13c3-403e-8408-fd51e65a1f58-kube-api-access\") pod \"installer-9-crc\" (UID: \"77c28b4e-13c3-403e-8408-fd51e65a1f58\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 00:09:32 crc kubenswrapper[4856]: I1202 00:09:32.647428 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/77c28b4e-13c3-403e-8408-fd51e65a1f58-kube-api-access\") pod \"installer-9-crc\" (UID: \"77c28b4e-13c3-403e-8408-fd51e65a1f58\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 00:09:32 crc kubenswrapper[4856]: I1202 00:09:32.647485 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/77c28b4e-13c3-403e-8408-fd51e65a1f58-kubelet-dir\") pod \"installer-9-crc\" (UID: \"77c28b4e-13c3-403e-8408-fd51e65a1f58\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 00:09:32 crc kubenswrapper[4856]: I1202 00:09:32.647513 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/77c28b4e-13c3-403e-8408-fd51e65a1f58-var-lock\") pod \"installer-9-crc\" (UID: \"77c28b4e-13c3-403e-8408-fd51e65a1f58\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 00:09:32 crc kubenswrapper[4856]: I1202 00:09:32.647624 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/77c28b4e-13c3-403e-8408-fd51e65a1f58-var-lock\") pod \"installer-9-crc\" (UID: \"77c28b4e-13c3-403e-8408-fd51e65a1f58\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 00:09:32 crc kubenswrapper[4856]: I1202 00:09:32.647630 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/77c28b4e-13c3-403e-8408-fd51e65a1f58-kubelet-dir\") pod \"installer-9-crc\" (UID: 
\"77c28b4e-13c3-403e-8408-fd51e65a1f58\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 00:09:32 crc kubenswrapper[4856]: I1202 00:09:32.664971 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/77c28b4e-13c3-403e-8408-fd51e65a1f58-kube-api-access\") pod \"installer-9-crc\" (UID: \"77c28b4e-13c3-403e-8408-fd51e65a1f58\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 00:09:32 crc kubenswrapper[4856]: I1202 00:09:32.721927 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 02 00:09:32 crc kubenswrapper[4856]: I1202 00:09:32.912759 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 02 00:09:33 crc kubenswrapper[4856]: I1202 00:09:33.172405 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"77c28b4e-13c3-403e-8408-fd51e65a1f58","Type":"ContainerStarted","Data":"1f41c8936e470e1e65a9ae4201c580534f65d62215353bd1bc633d87e943c415"} Dec 02 00:09:33 crc kubenswrapper[4856]: I1202 00:09:33.820027 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7lsm8"] Dec 02 00:09:34 crc kubenswrapper[4856]: I1202 00:09:34.178221 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"77c28b4e-13c3-403e-8408-fd51e65a1f58","Type":"ContainerStarted","Data":"d0a861d8427002c944a75b34065e0c8b122aaa111637fc5c5a603c6efba20f05"} Dec 02 00:09:34 crc kubenswrapper[4856]: I1202 00:09:34.190842 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=2.190823116 podStartE2EDuration="2.190823116s" podCreationTimestamp="2025-12-02 00:09:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:09:34.189257935 +0000 UTC m=+201.215625939" watchObservedRunningTime="2025-12-02 00:09:34.190823116 +0000 UTC m=+201.217191120" Dec 02 00:09:35 crc kubenswrapper[4856]: I1202 00:09:35.062203 4856 patch_prober.go:28] interesting pod/machine-config-daemon-455ww container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 00:09:35 crc kubenswrapper[4856]: I1202 00:09:35.062525 4856 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 00:09:35 crc kubenswrapper[4856]: I1202 00:09:35.062565 4856 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-455ww" Dec 02 00:09:35 crc kubenswrapper[4856]: I1202 00:09:35.063064 4856 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82"} pod="openshift-machine-config-operator/machine-config-daemon-455ww" containerMessage="Container machine-config-daemon failed 
liveness probe, will be restarted" Dec 02 00:09:35 crc kubenswrapper[4856]: I1202 00:09:35.063144 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" containerID="cri-o://f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82" gracePeriod=600 Dec 02 00:09:36 crc kubenswrapper[4856]: I1202 00:09:36.192771 4856 generic.go:334] "Generic (PLEG): container finished" podID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerID="f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82" exitCode=0 Dec 02 00:09:36 crc kubenswrapper[4856]: I1202 00:09:36.192851 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" event={"ID":"0271f00d-b420-4dee-aa8b-92d6fc294b2a","Type":"ContainerDied","Data":"f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82"} Dec 02 00:09:37 crc kubenswrapper[4856]: I1202 00:09:37.202855 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" event={"ID":"0271f00d-b420-4dee-aa8b-92d6fc294b2a","Type":"ContainerStarted","Data":"8d4f8a0eefd4b993c3e9c453142f8f243d7adda23056d06df873f94022546b0b"} Dec 02 00:09:40 crc kubenswrapper[4856]: I1202 00:09:40.390621 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5r988" Dec 02 00:09:40 crc kubenswrapper[4856]: I1202 00:09:40.391075 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5r988" Dec 02 00:09:41 crc kubenswrapper[4856]: I1202 00:09:41.174205 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5r988" Dec 02 00:09:41 crc kubenswrapper[4856]: I1202 00:09:41.260757 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5r988" Dec 02 00:09:42 crc kubenswrapper[4856]: I1202 00:09:42.227967 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ckmlx" event={"ID":"fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd","Type":"ContainerStarted","Data":"f20d2a2f1acfd61a38b5ab76e15aba17a4c99e06a5a1690c17b022b8ceb62d04"} Dec 02 00:09:42 crc kubenswrapper[4856]: I1202 00:09:42.230254 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xzzbh" event={"ID":"6efab283-f656-41a4-8996-4aee7986e931","Type":"ContainerStarted","Data":"c4403aaa286331005391fea6aed8a7273e0471a4d6d53a02c4116beefc45a732"} Dec 02 00:09:43 crc kubenswrapper[4856]: I1202 00:09:43.237407 4856 generic.go:334] "Generic (PLEG): container finished" podID="fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd" containerID="f20d2a2f1acfd61a38b5ab76e15aba17a4c99e06a5a1690c17b022b8ceb62d04" exitCode=0 Dec 02 00:09:43 crc kubenswrapper[4856]: I1202 00:09:43.237491 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ckmlx" event={"ID":"fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd","Type":"ContainerDied","Data":"f20d2a2f1acfd61a38b5ab76e15aba17a4c99e06a5a1690c17b022b8ceb62d04"} Dec 02 00:09:43 crc kubenswrapper[4856]: I1202 00:09:43.240711 4856 generic.go:334] "Generic (PLEG): container finished" podID="6efab283-f656-41a4-8996-4aee7986e931" 
containerID="c4403aaa286331005391fea6aed8a7273e0471a4d6d53a02c4116beefc45a732" exitCode=0 Dec 02 00:09:43 crc kubenswrapper[4856]: I1202 00:09:43.240825 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xzzbh" event={"ID":"6efab283-f656-41a4-8996-4aee7986e931","Type":"ContainerDied","Data":"c4403aaa286331005391fea6aed8a7273e0471a4d6d53a02c4116beefc45a732"} Dec 02 00:09:43 crc kubenswrapper[4856]: I1202 00:09:43.243697 4856 generic.go:334] "Generic (PLEG): container finished" podID="b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6" containerID="1880b981f11fc940b571c32c459cc164c72517ab33082d9384d1410b80495d1e" exitCode=0 Dec 02 00:09:43 crc kubenswrapper[4856]: I1202 00:09:43.243787 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v5shq" event={"ID":"b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6","Type":"ContainerDied","Data":"1880b981f11fc940b571c32c459cc164c72517ab33082d9384d1410b80495d1e"} Dec 02 00:09:44 crc kubenswrapper[4856]: I1202 00:09:44.262231 4856 generic.go:334] "Generic (PLEG): container finished" podID="7763d51e-74d4-4bb9-b956-e33a31753604" containerID="2d7288974c87ba73fd333e302f3a28544589d586210127a6c16e3915437cda5f" exitCode=0 Dec 02 00:09:44 crc kubenswrapper[4856]: I1202 00:09:44.262461 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vhxwd" event={"ID":"7763d51e-74d4-4bb9-b956-e33a31753604","Type":"ContainerDied","Data":"2d7288974c87ba73fd333e302f3a28544589d586210127a6c16e3915437cda5f"} Dec 02 00:09:44 crc kubenswrapper[4856]: I1202 00:09:44.277308 4856 generic.go:334] "Generic (PLEG): container finished" podID="ba515d9b-e262-408a-a28d-04c006e8a922" containerID="13ce17ab937db264db34aa3ba0f137b74f9b5a8ec0e9387496bb9e565ddf387f" exitCode=0 Dec 02 00:09:44 crc kubenswrapper[4856]: I1202 00:09:44.277650 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2mw6j" event={"ID":"ba515d9b-e262-408a-a28d-04c006e8a922","Type":"ContainerDied","Data":"13ce17ab937db264db34aa3ba0f137b74f9b5a8ec0e9387496bb9e565ddf387f"} Dec 02 00:09:44 crc kubenswrapper[4856]: I1202 00:09:44.294800 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xzzbh" event={"ID":"6efab283-f656-41a4-8996-4aee7986e931","Type":"ContainerStarted","Data":"8d33e77a6a1e76819515da235ef1d7ea14fd7f6390f595077115b5dda4c961fb"} Dec 02 00:09:44 crc kubenswrapper[4856]: I1202 00:09:44.311716 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-xzzbh" podStartSLOduration=3.245153843 podStartE2EDuration="58.311700282s" podCreationTimestamp="2025-12-02 00:08:46 +0000 UTC" firstStartedPulling="2025-12-02 00:08:48.762284711 +0000 UTC m=+155.788652715" lastFinishedPulling="2025-12-02 00:09:43.82883115 +0000 UTC m=+210.855199154" observedRunningTime="2025-12-02 00:09:44.311385673 +0000 UTC m=+211.337753677" watchObservedRunningTime="2025-12-02 00:09:44.311700282 +0000 UTC m=+211.338068286" Dec 02 00:09:45 crc kubenswrapper[4856]: I1202 00:09:45.303422 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v5shq" event={"ID":"b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6","Type":"ContainerStarted","Data":"2bb81cc2f98f0f5db7ac24cbdd5d5b14399c2a7ccafd48e42b17d3f57c93c9c4"} Dec 02 00:09:45 crc kubenswrapper[4856]: I1202 00:09:45.307090 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-2mw6j" event={"ID":"ba515d9b-e262-408a-a28d-04c006e8a922","Type":"ContainerStarted","Data":"3736939a5d012a2de638b1b3d2b3d62f9bd1bc8a3d8046deeab02db9c78eb325"} Dec 02 00:09:45 crc kubenswrapper[4856]: I1202 00:09:45.309867 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ckmlx" event={"ID":"fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd","Type":"ContainerStarted","Data":"ccb996c6e68d72a25f9fa2643b361a0f142e51af582e661781a31e268fa20f8c"} Dec 02 00:09:45 crc kubenswrapper[4856]: I1202 00:09:45.325608 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-v5shq" podStartSLOduration=2.956153464 podStartE2EDuration="58.325578812s" podCreationTimestamp="2025-12-02 00:08:47 +0000 UTC" firstStartedPulling="2025-12-02 00:08:48.770957581 +0000 UTC m=+155.797325595" lastFinishedPulling="2025-12-02 00:09:44.140382939 +0000 UTC m=+211.166750943" observedRunningTime="2025-12-02 00:09:45.323069597 +0000 UTC m=+212.349437611" watchObservedRunningTime="2025-12-02 00:09:45.325578812 +0000 UTC m=+212.351946816" Dec 02 00:09:45 crc kubenswrapper[4856]: I1202 00:09:45.348484 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2mw6j" podStartSLOduration=2.118755218 podStartE2EDuration="58.348467905s" podCreationTimestamp="2025-12-02 00:08:47 +0000 UTC" firstStartedPulling="2025-12-02 00:08:48.753870717 +0000 UTC m=+155.780238711" lastFinishedPulling="2025-12-02 00:09:44.983583394 +0000 UTC m=+212.009951398" observedRunningTime="2025-12-02 00:09:45.346670129 +0000 UTC m=+212.373038143" watchObservedRunningTime="2025-12-02 00:09:45.348467905 +0000 UTC m=+212.374835909" Dec 02 00:09:45 crc kubenswrapper[4856]: I1202 00:09:45.364321 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ckmlx" podStartSLOduration=1.9072118900000001 podStartE2EDuration="56.364305366s" podCreationTimestamp="2025-12-02 00:08:49 +0000 UTC" firstStartedPulling="2025-12-02 00:08:49.786932681 +0000 UTC m=+156.813300685" lastFinishedPulling="2025-12-02 00:09:44.244026157 +0000 UTC m=+211.270394161" observedRunningTime="2025-12-02 00:09:45.363844634 +0000 UTC m=+212.390212648" watchObservedRunningTime="2025-12-02 00:09:45.364305366 +0000 UTC m=+212.390673370" Dec 02 00:09:46 crc kubenswrapper[4856]: I1202 00:09:46.316875 4856 generic.go:334] "Generic (PLEG): container finished" podID="884edeee-5df9-4820-be36-38b7095706ef" containerID="8c3363f3b01b87e6d71e2494eae4d82ba79e8980eb6db8d9ac4af13ebfed3367" exitCode=0 Dec 02 00:09:46 crc kubenswrapper[4856]: I1202 00:09:46.316954 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j2276" event={"ID":"884edeee-5df9-4820-be36-38b7095706ef","Type":"ContainerDied","Data":"8c3363f3b01b87e6d71e2494eae4d82ba79e8980eb6db8d9ac4af13ebfed3367"} Dec 02 00:09:46 crc kubenswrapper[4856]: I1202 00:09:46.320096 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vhxwd" event={"ID":"7763d51e-74d4-4bb9-b956-e33a31753604","Type":"ContainerStarted","Data":"5b0f8155746be220fafbc825aedd512fe7cb6314167a0adad2376ccb2983a331"} Dec 02 00:09:46 crc kubenswrapper[4856]: I1202 00:09:46.322683 4856 generic.go:334] "Generic (PLEG): container finished" podID="f4b04b8c-61cf-4bea-9eda-db2fc30e2247" 
containerID="03a517617af2dcc6d6912d82e6da7a06bb7ed6ac9fb28d3ad2bb68490687c612" exitCode=0 Dec 02 00:09:46 crc kubenswrapper[4856]: I1202 00:09:46.322718 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s2slc" event={"ID":"f4b04b8c-61cf-4bea-9eda-db2fc30e2247","Type":"ContainerDied","Data":"03a517617af2dcc6d6912d82e6da7a06bb7ed6ac9fb28d3ad2bb68490687c612"} Dec 02 00:09:47 crc kubenswrapper[4856]: I1202 00:09:47.234463 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-xzzbh" Dec 02 00:09:47 crc kubenswrapper[4856]: I1202 00:09:47.234515 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-xzzbh" Dec 02 00:09:47 crc kubenswrapper[4856]: I1202 00:09:47.283302 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-xzzbh" Dec 02 00:09:47 crc kubenswrapper[4856]: I1202 00:09:47.303280 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vhxwd" podStartSLOduration=3.900116399 podStartE2EDuration="58.303265213s" podCreationTimestamp="2025-12-02 00:08:49 +0000 UTC" firstStartedPulling="2025-12-02 00:08:50.826735686 +0000 UTC m=+157.853103690" lastFinishedPulling="2025-12-02 00:09:45.2298845 +0000 UTC m=+212.256252504" observedRunningTime="2025-12-02 00:09:46.377460927 +0000 UTC m=+213.403828931" watchObservedRunningTime="2025-12-02 00:09:47.303265213 +0000 UTC m=+214.329633217" Dec 02 00:09:47 crc kubenswrapper[4856]: I1202 00:09:47.597042 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2mw6j" Dec 02 00:09:47 crc kubenswrapper[4856]: I1202 00:09:47.597117 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2mw6j" Dec 02 00:09:47 crc kubenswrapper[4856]: I1202 00:09:47.638336 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2mw6j" Dec 02 00:09:47 crc kubenswrapper[4856]: I1202 00:09:47.848474 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-v5shq" Dec 02 00:09:47 crc kubenswrapper[4856]: I1202 00:09:47.848538 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-v5shq" Dec 02 00:09:47 crc kubenswrapper[4856]: I1202 00:09:47.891298 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-v5shq" Dec 02 00:09:48 crc kubenswrapper[4856]: I1202 00:09:48.339102 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j2276" event={"ID":"884edeee-5df9-4820-be36-38b7095706ef","Type":"ContainerStarted","Data":"7d212f5f90b1b1cfb63b41cebff940b88890236316f4d59523f287bb18177460"} Dec 02 00:09:48 crc kubenswrapper[4856]: I1202 00:09:48.359307 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-j2276" podStartSLOduration=2.9819591020000003 podStartE2EDuration="1m1.359289216s" podCreationTimestamp="2025-12-02 00:08:47 +0000 UTC" firstStartedPulling="2025-12-02 00:08:48.752704237 +0000 UTC m=+155.779072241" lastFinishedPulling="2025-12-02 00:09:47.130034351 +0000 UTC m=+214.156402355" 
observedRunningTime="2025-12-02 00:09:48.356070993 +0000 UTC m=+215.382438997" watchObservedRunningTime="2025-12-02 00:09:48.359289216 +0000 UTC m=+215.385657220" Dec 02 00:09:49 crc kubenswrapper[4856]: I1202 00:09:49.370354 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ckmlx" Dec 02 00:09:49 crc kubenswrapper[4856]: I1202 00:09:49.370418 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ckmlx" Dec 02 00:09:49 crc kubenswrapper[4856]: I1202 00:09:49.406545 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ckmlx" Dec 02 00:09:49 crc kubenswrapper[4856]: I1202 00:09:49.786598 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vhxwd" Dec 02 00:09:49 crc kubenswrapper[4856]: I1202 00:09:49.786644 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vhxwd" Dec 02 00:09:49 crc kubenswrapper[4856]: I1202 00:09:49.827236 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vhxwd" Dec 02 00:09:50 crc kubenswrapper[4856]: I1202 00:09:50.383778 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vhxwd" Dec 02 00:09:50 crc kubenswrapper[4856]: I1202 00:09:50.389610 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ckmlx" Dec 02 00:09:52 crc kubenswrapper[4856]: I1202 00:09:52.484794 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vhxwd"] Dec 02 00:09:52 crc kubenswrapper[4856]: I1202 00:09:52.486544 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vhxwd" podUID="7763d51e-74d4-4bb9-b956-e33a31753604" containerName="registry-server" containerID="cri-o://5b0f8155746be220fafbc825aedd512fe7cb6314167a0adad2376ccb2983a331" gracePeriod=2 Dec 02 00:09:53 crc kubenswrapper[4856]: I1202 00:09:53.365582 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s2slc" event={"ID":"f4b04b8c-61cf-4bea-9eda-db2fc30e2247","Type":"ContainerStarted","Data":"f3e62fc70b46677ac52469ea377fb1f95b2b2088516517e1d7880456bf09f78e"} Dec 02 00:09:54 crc kubenswrapper[4856]: I1202 00:09:54.375332 4856 generic.go:334] "Generic (PLEG): container finished" podID="7763d51e-74d4-4bb9-b956-e33a31753604" containerID="5b0f8155746be220fafbc825aedd512fe7cb6314167a0adad2376ccb2983a331" exitCode=0 Dec 02 00:09:54 crc kubenswrapper[4856]: I1202 00:09:54.375389 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vhxwd" event={"ID":"7763d51e-74d4-4bb9-b956-e33a31753604","Type":"ContainerDied","Data":"5b0f8155746be220fafbc825aedd512fe7cb6314167a0adad2376ccb2983a331"} Dec 02 00:09:55 crc kubenswrapper[4856]: I1202 00:09:55.286456 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vhxwd" Dec 02 00:09:55 crc kubenswrapper[4856]: I1202 00:09:55.382184 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vhxwd" event={"ID":"7763d51e-74d4-4bb9-b956-e33a31753604","Type":"ContainerDied","Data":"3905196c6ecd843c25af81afa2381776ba4920db128e69b5f639b71639406c87"} Dec 02 00:09:55 crc kubenswrapper[4856]: I1202 00:09:55.382253 4856 scope.go:117] "RemoveContainer" containerID="5b0f8155746be220fafbc825aedd512fe7cb6314167a0adad2376ccb2983a331" Dec 02 00:09:55 crc kubenswrapper[4856]: I1202 00:09:55.382206 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vhxwd" Dec 02 00:09:55 crc kubenswrapper[4856]: I1202 00:09:55.395802 4856 scope.go:117] "RemoveContainer" containerID="2d7288974c87ba73fd333e302f3a28544589d586210127a6c16e3915437cda5f" Dec 02 00:09:55 crc kubenswrapper[4856]: I1202 00:09:55.401911 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-s2slc" podStartSLOduration=4.886693423 podStartE2EDuration="1m5.401894502s" podCreationTimestamp="2025-12-02 00:08:50 +0000 UTC" firstStartedPulling="2025-12-02 00:08:51.85151512 +0000 UTC m=+158.877883124" lastFinishedPulling="2025-12-02 00:09:52.366716169 +0000 UTC m=+219.393084203" observedRunningTime="2025-12-02 00:09:55.400789383 +0000 UTC m=+222.427157397" watchObservedRunningTime="2025-12-02 00:09:55.401894502 +0000 UTC m=+222.428262506" Dec 02 00:09:55 crc kubenswrapper[4856]: I1202 00:09:55.422775 4856 scope.go:117] "RemoveContainer" containerID="dfc3741403b5d58289709fc58221116c2abbf3e0052be6d66eba58776f0a90b2" Dec 02 00:09:55 crc kubenswrapper[4856]: I1202 00:09:55.477898 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7763d51e-74d4-4bb9-b956-e33a31753604-utilities\") pod \"7763d51e-74d4-4bb9-b956-e33a31753604\" (UID: \"7763d51e-74d4-4bb9-b956-e33a31753604\") " Dec 02 00:09:55 crc kubenswrapper[4856]: I1202 00:09:55.477959 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hklgb\" (UniqueName: \"kubernetes.io/projected/7763d51e-74d4-4bb9-b956-e33a31753604-kube-api-access-hklgb\") pod \"7763d51e-74d4-4bb9-b956-e33a31753604\" (UID: \"7763d51e-74d4-4bb9-b956-e33a31753604\") " Dec 02 00:09:55 crc kubenswrapper[4856]: I1202 00:09:55.477993 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7763d51e-74d4-4bb9-b956-e33a31753604-catalog-content\") pod \"7763d51e-74d4-4bb9-b956-e33a31753604\" (UID: \"7763d51e-74d4-4bb9-b956-e33a31753604\") " Dec 02 00:09:55 crc kubenswrapper[4856]: I1202 00:09:55.478700 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7763d51e-74d4-4bb9-b956-e33a31753604-utilities" (OuterVolumeSpecName: "utilities") pod "7763d51e-74d4-4bb9-b956-e33a31753604" (UID: "7763d51e-74d4-4bb9-b956-e33a31753604"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:09:55 crc kubenswrapper[4856]: I1202 00:09:55.487051 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7763d51e-74d4-4bb9-b956-e33a31753604-kube-api-access-hklgb" (OuterVolumeSpecName: "kube-api-access-hklgb") pod "7763d51e-74d4-4bb9-b956-e33a31753604" (UID: "7763d51e-74d4-4bb9-b956-e33a31753604"). InnerVolumeSpecName "kube-api-access-hklgb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:09:55 crc kubenswrapper[4856]: I1202 00:09:55.496900 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7763d51e-74d4-4bb9-b956-e33a31753604-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7763d51e-74d4-4bb9-b956-e33a31753604" (UID: "7763d51e-74d4-4bb9-b956-e33a31753604"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:09:55 crc kubenswrapper[4856]: I1202 00:09:55.579529 4856 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7763d51e-74d4-4bb9-b956-e33a31753604-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 00:09:55 crc kubenswrapper[4856]: I1202 00:09:55.579557 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hklgb\" (UniqueName: \"kubernetes.io/projected/7763d51e-74d4-4bb9-b956-e33a31753604-kube-api-access-hklgb\") on node \"crc\" DevicePath \"\"" Dec 02 00:09:55 crc kubenswrapper[4856]: I1202 00:09:55.579568 4856 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7763d51e-74d4-4bb9-b956-e33a31753604-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 00:09:55 crc kubenswrapper[4856]: I1202 00:09:55.714641 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vhxwd"] Dec 02 00:09:55 crc kubenswrapper[4856]: I1202 00:09:55.718261 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vhxwd"] Dec 02 00:09:57 crc kubenswrapper[4856]: I1202 00:09:57.264514 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7763d51e-74d4-4bb9-b956-e33a31753604" path="/var/lib/kubelet/pods/7763d51e-74d4-4bb9-b956-e33a31753604/volumes" Dec 02 00:09:57 crc kubenswrapper[4856]: I1202 00:09:57.279368 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-xzzbh" Dec 02 00:09:57 crc kubenswrapper[4856]: I1202 00:09:57.386869 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-j2276" Dec 02 00:09:57 crc kubenswrapper[4856]: I1202 00:09:57.386923 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-j2276" Dec 02 00:09:57 crc kubenswrapper[4856]: I1202 00:09:57.423256 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-j2276" Dec 02 00:09:57 crc kubenswrapper[4856]: I1202 00:09:57.456845 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-j2276" Dec 02 00:09:57 crc kubenswrapper[4856]: I1202 00:09:57.636384 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2mw6j" Dec 02 00:09:57 crc kubenswrapper[4856]: 
I1202 00:09:57.895458 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-v5shq" Dec 02 00:09:58 crc kubenswrapper[4856]: I1202 00:09:58.846247 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" podUID="6cc6ab38-8ed6-468a-864c-25354ae45707" containerName="oauth-openshift" containerID="cri-o://7050b9d2c2f6f25e2de25d185625ad6a56ffc565a23852375e46adbbeac732fa" gracePeriod=15 Dec 02 00:09:59 crc kubenswrapper[4856]: I1202 00:09:59.685741 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2mw6j"] Dec 02 00:09:59 crc kubenswrapper[4856]: I1202 00:09:59.685989 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2mw6j" podUID="ba515d9b-e262-408a-a28d-04c006e8a922" containerName="registry-server" containerID="cri-o://3736939a5d012a2de638b1b3d2b3d62f9bd1bc8a3d8046deeab02db9c78eb325" gracePeriod=2 Dec 02 00:09:59 crc kubenswrapper[4856]: I1202 00:09:59.884559 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v5shq"] Dec 02 00:09:59 crc kubenswrapper[4856]: I1202 00:09:59.884825 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-v5shq" podUID="b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6" containerName="registry-server" containerID="cri-o://2bb81cc2f98f0f5db7ac24cbdd5d5b14399c2a7ccafd48e42b17d3f57c93c9c4" gracePeriod=2 Dec 02 00:10:00 crc kubenswrapper[4856]: I1202 00:10:00.423811 4856 generic.go:334] "Generic (PLEG): container finished" podID="6cc6ab38-8ed6-468a-864c-25354ae45707" containerID="7050b9d2c2f6f25e2de25d185625ad6a56ffc565a23852375e46adbbeac732fa" exitCode=0 Dec 02 00:10:00 crc kubenswrapper[4856]: I1202 00:10:00.424129 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" event={"ID":"6cc6ab38-8ed6-468a-864c-25354ae45707","Type":"ContainerDied","Data":"7050b9d2c2f6f25e2de25d185625ad6a56ffc565a23852375e46adbbeac732fa"} Dec 02 00:10:00 crc kubenswrapper[4856]: I1202 00:10:00.428324 4856 generic.go:334] "Generic (PLEG): container finished" podID="b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6" containerID="2bb81cc2f98f0f5db7ac24cbdd5d5b14399c2a7ccafd48e42b17d3f57c93c9c4" exitCode=0 Dec 02 00:10:00 crc kubenswrapper[4856]: I1202 00:10:00.428368 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v5shq" event={"ID":"b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6","Type":"ContainerDied","Data":"2bb81cc2f98f0f5db7ac24cbdd5d5b14399c2a7ccafd48e42b17d3f57c93c9c4"} Dec 02 00:10:00 crc kubenswrapper[4856]: I1202 00:10:00.732779 4856 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-7lsm8 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.5:6443/healthz\": dial tcp 10.217.0.5:6443: connect: connection refused" start-of-body= Dec 02 00:10:00 crc kubenswrapper[4856]: I1202 00:10:00.732827 4856 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" podUID="6cc6ab38-8ed6-468a-864c-25354ae45707" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.5:6443/healthz\": dial tcp 10.217.0.5:6443: connect: connection refused" Dec 02 00:10:00 crc 
kubenswrapper[4856]: I1202 00:10:00.799906 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-s2slc" Dec 02 00:10:00 crc kubenswrapper[4856]: I1202 00:10:00.799940 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-s2slc" Dec 02 00:10:00 crc kubenswrapper[4856]: I1202 00:10:00.843620 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-s2slc" Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.439012 4856 generic.go:334] "Generic (PLEG): container finished" podID="ba515d9b-e262-408a-a28d-04c006e8a922" containerID="3736939a5d012a2de638b1b3d2b3d62f9bd1bc8a3d8046deeab02db9c78eb325" exitCode=0 Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.440516 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2mw6j" event={"ID":"ba515d9b-e262-408a-a28d-04c006e8a922","Type":"ContainerDied","Data":"3736939a5d012a2de638b1b3d2b3d62f9bd1bc8a3d8046deeab02db9c78eb325"} Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.478603 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-s2slc" Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.785250 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2mw6j" Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.790379 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v5shq" Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.794366 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.955915 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6cc6ab38-8ed6-468a-864c-25354ae45707-audit-dir\") pod \"6cc6ab38-8ed6-468a-864c-25354ae45707\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.956189 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba515d9b-e262-408a-a28d-04c006e8a922-utilities\") pod \"ba515d9b-e262-408a-a28d-04c006e8a922\" (UID: \"ba515d9b-e262-408a-a28d-04c006e8a922\") " Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.956212 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-cliconfig\") pod \"6cc6ab38-8ed6-468a-864c-25354ae45707\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.956235 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-trusted-ca-bundle\") pod \"6cc6ab38-8ed6-468a-864c-25354ae45707\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.956255 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l48ws\" (UniqueName: \"kubernetes.io/projected/ba515d9b-e262-408a-a28d-04c006e8a922-kube-api-access-l48ws\") pod \"ba515d9b-e262-408a-a28d-04c006e8a922\" (UID: \"ba515d9b-e262-408a-a28d-04c006e8a922\") " Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.956256 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6cc6ab38-8ed6-468a-864c-25354ae45707-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "6cc6ab38-8ed6-468a-864c-25354ae45707" (UID: "6cc6ab38-8ed6-468a-864c-25354ae45707"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.956273 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-service-ca\") pod \"6cc6ab38-8ed6-468a-864c-25354ae45707\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.956322 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6-utilities\") pod \"b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6\" (UID: \"b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6\") " Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.956345 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-user-template-provider-selection\") pod \"6cc6ab38-8ed6-468a-864c-25354ae45707\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.956366 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-serving-cert\") pod \"6cc6ab38-8ed6-468a-864c-25354ae45707\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.956386 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-session\") pod \"6cc6ab38-8ed6-468a-864c-25354ae45707\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.956408 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba515d9b-e262-408a-a28d-04c006e8a922-catalog-content\") pod \"ba515d9b-e262-408a-a28d-04c006e8a922\" (UID: \"ba515d9b-e262-408a-a28d-04c006e8a922\") " Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.956433 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-user-idp-0-file-data\") pod \"6cc6ab38-8ed6-468a-864c-25354ae45707\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.956456 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6-catalog-content\") pod \"b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6\" (UID: \"b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6\") " Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.956474 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-ocp-branding-template\") pod \"6cc6ab38-8ed6-468a-864c-25354ae45707\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.956490 4856 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-user-template-error\") pod \"6cc6ab38-8ed6-468a-864c-25354ae45707\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.956509 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-router-certs\") pod \"6cc6ab38-8ed6-468a-864c-25354ae45707\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.956528 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-user-template-login\") pod \"6cc6ab38-8ed6-468a-864c-25354ae45707\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.956549 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fbkbx\" (UniqueName: \"kubernetes.io/projected/b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6-kube-api-access-fbkbx\") pod \"b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6\" (UID: \"b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6\") " Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.956567 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xw8bp\" (UniqueName: \"kubernetes.io/projected/6cc6ab38-8ed6-468a-864c-25354ae45707-kube-api-access-xw8bp\") pod \"6cc6ab38-8ed6-468a-864c-25354ae45707\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.956618 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6cc6ab38-8ed6-468a-864c-25354ae45707-audit-policies\") pod \"6cc6ab38-8ed6-468a-864c-25354ae45707\" (UID: \"6cc6ab38-8ed6-468a-864c-25354ae45707\") " Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.956748 4856 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6cc6ab38-8ed6-468a-864c-25354ae45707-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.956954 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "6cc6ab38-8ed6-468a-864c-25354ae45707" (UID: "6cc6ab38-8ed6-468a-864c-25354ae45707"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.957008 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cc6ab38-8ed6-468a-864c-25354ae45707-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "6cc6ab38-8ed6-468a-864c-25354ae45707" (UID: "6cc6ab38-8ed6-468a-864c-25354ae45707"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.957262 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "6cc6ab38-8ed6-468a-864c-25354ae45707" (UID: "6cc6ab38-8ed6-468a-864c-25354ae45707"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.957516 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba515d9b-e262-408a-a28d-04c006e8a922-utilities" (OuterVolumeSpecName: "utilities") pod "ba515d9b-e262-408a-a28d-04c006e8a922" (UID: "ba515d9b-e262-408a-a28d-04c006e8a922"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.958187 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "6cc6ab38-8ed6-468a-864c-25354ae45707" (UID: "6cc6ab38-8ed6-468a-864c-25354ae45707"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.958408 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6-utilities" (OuterVolumeSpecName: "utilities") pod "b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6" (UID: "b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.962151 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6-kube-api-access-fbkbx" (OuterVolumeSpecName: "kube-api-access-fbkbx") pod "b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6" (UID: "b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6"). InnerVolumeSpecName "kube-api-access-fbkbx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.962459 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "6cc6ab38-8ed6-468a-864c-25354ae45707" (UID: "6cc6ab38-8ed6-468a-864c-25354ae45707"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.962782 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "6cc6ab38-8ed6-468a-864c-25354ae45707" (UID: "6cc6ab38-8ed6-468a-864c-25354ae45707"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.963233 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "6cc6ab38-8ed6-468a-864c-25354ae45707" (UID: "6cc6ab38-8ed6-468a-864c-25354ae45707"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.965105 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "6cc6ab38-8ed6-468a-864c-25354ae45707" (UID: "6cc6ab38-8ed6-468a-864c-25354ae45707"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.965362 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6cc6ab38-8ed6-468a-864c-25354ae45707-kube-api-access-xw8bp" (OuterVolumeSpecName: "kube-api-access-xw8bp") pod "6cc6ab38-8ed6-468a-864c-25354ae45707" (UID: "6cc6ab38-8ed6-468a-864c-25354ae45707"). InnerVolumeSpecName "kube-api-access-xw8bp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.965503 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "6cc6ab38-8ed6-468a-864c-25354ae45707" (UID: "6cc6ab38-8ed6-468a-864c-25354ae45707"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.965733 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "6cc6ab38-8ed6-468a-864c-25354ae45707" (UID: "6cc6ab38-8ed6-468a-864c-25354ae45707"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.966018 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "6cc6ab38-8ed6-468a-864c-25354ae45707" (UID: "6cc6ab38-8ed6-468a-864c-25354ae45707"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.966406 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "6cc6ab38-8ed6-468a-864c-25354ae45707" (UID: "6cc6ab38-8ed6-468a-864c-25354ae45707"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:10:01 crc kubenswrapper[4856]: I1202 00:10:01.975824 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba515d9b-e262-408a-a28d-04c006e8a922-kube-api-access-l48ws" (OuterVolumeSpecName: "kube-api-access-l48ws") pod "ba515d9b-e262-408a-a28d-04c006e8a922" (UID: "ba515d9b-e262-408a-a28d-04c006e8a922"). InnerVolumeSpecName "kube-api-access-l48ws". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.020554 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba515d9b-e262-408a-a28d-04c006e8a922-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ba515d9b-e262-408a-a28d-04c006e8a922" (UID: "ba515d9b-e262-408a-a28d-04c006e8a922"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.027517 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6" (UID: "b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.057964 4856 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.058001 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l48ws\" (UniqueName: \"kubernetes.io/projected/ba515d9b-e262-408a-a28d-04c006e8a922-kube-api-access-l48ws\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.058017 4856 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.058030 4856 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.058045 4856 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.058057 4856 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.058068 4856 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.058079 4856 
reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba515d9b-e262-408a-a28d-04c006e8a922-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.058088 4856 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.058098 4856 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.058110 4856 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.058120 4856 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.058131 4856 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.058142 4856 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.058154 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fbkbx\" (UniqueName: \"kubernetes.io/projected/b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6-kube-api-access-fbkbx\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.058168 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xw8bp\" (UniqueName: \"kubernetes.io/projected/6cc6ab38-8ed6-468a-864c-25354ae45707-kube-api-access-xw8bp\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.058178 4856 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6cc6ab38-8ed6-468a-864c-25354ae45707-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.058187 4856 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba515d9b-e262-408a-a28d-04c006e8a922-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.058197 4856 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6cc6ab38-8ed6-468a-864c-25354ae45707-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.446201 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2mw6j" 
event={"ID":"ba515d9b-e262-408a-a28d-04c006e8a922","Type":"ContainerDied","Data":"d35540e9b44b26aa994d14b5992e6b7c81c986b50f074439055ec65e27390e6d"} Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.446283 4856 scope.go:117] "RemoveContainer" containerID="3736939a5d012a2de638b1b3d2b3d62f9bd1bc8a3d8046deeab02db9c78eb325" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.446385 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2mw6j" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.447322 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" event={"ID":"6cc6ab38-8ed6-468a-864c-25354ae45707","Type":"ContainerDied","Data":"aa587bc089f6f4fd45e2d8f775fd3bee273c6fc2c7d6f61c057df7a25ee3a1a6"} Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.447365 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7lsm8" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.450894 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v5shq" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.450869 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v5shq" event={"ID":"b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6","Type":"ContainerDied","Data":"18f929a68cf1ad9dfb5570205793c43029f98542cbd785dd4469d98409d854bc"} Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.462492 4856 scope.go:117] "RemoveContainer" containerID="13ce17ab937db264db34aa3ba0f137b74f9b5a8ec0e9387496bb9e565ddf387f" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.481946 4856 scope.go:117] "RemoveContainer" containerID="c8bba70ea2c36af4b06d8c8731803c83f2aa85ab72d537e77f7a124ca3cdaef9" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.491123 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2mw6j"] Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.491994 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2mw6j"] Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.500459 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7lsm8"] Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.502375 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7lsm8"] Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.506801 4856 scope.go:117] "RemoveContainer" containerID="7050b9d2c2f6f25e2de25d185625ad6a56ffc565a23852375e46adbbeac732fa" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.509844 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v5shq"] Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.515854 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-v5shq"] Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.520820 4856 scope.go:117] "RemoveContainer" containerID="2bb81cc2f98f0f5db7ac24cbdd5d5b14399c2a7ccafd48e42b17d3f57c93c9c4" Dec 02 00:10:02 crc kubenswrapper[4856]: I1202 00:10:02.531321 4856 scope.go:117] "RemoveContainer" containerID="1880b981f11fc940b571c32c459cc164c72517ab33082d9384d1410b80495d1e" Dec 02 00:10:02 
crc kubenswrapper[4856]: I1202 00:10:02.544197 4856 scope.go:117] "RemoveContainer" containerID="36afff96001e7bd909106a7fb2f68996ea6d06ce58ad77308b3852b4a28d5971" Dec 02 00:10:03 crc kubenswrapper[4856]: I1202 00:10:03.257887 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6cc6ab38-8ed6-468a-864c-25354ae45707" path="/var/lib/kubelet/pods/6cc6ab38-8ed6-468a-864c-25354ae45707/volumes" Dec 02 00:10:03 crc kubenswrapper[4856]: I1202 00:10:03.258347 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6" path="/var/lib/kubelet/pods/b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6/volumes" Dec 02 00:10:03 crc kubenswrapper[4856]: I1202 00:10:03.258899 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba515d9b-e262-408a-a28d-04c006e8a922" path="/var/lib/kubelet/pods/ba515d9b-e262-408a-a28d-04c006e8a922/volumes" Dec 02 00:10:04 crc kubenswrapper[4856]: I1202 00:10:04.284226 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-s2slc"] Dec 02 00:10:04 crc kubenswrapper[4856]: I1202 00:10:04.285624 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-s2slc" podUID="f4b04b8c-61cf-4bea-9eda-db2fc30e2247" containerName="registry-server" containerID="cri-o://f3e62fc70b46677ac52469ea377fb1f95b2b2088516517e1d7880456bf09f78e" gracePeriod=2 Dec 02 00:10:04 crc kubenswrapper[4856]: I1202 00:10:04.469677 4856 generic.go:334] "Generic (PLEG): container finished" podID="f4b04b8c-61cf-4bea-9eda-db2fc30e2247" containerID="f3e62fc70b46677ac52469ea377fb1f95b2b2088516517e1d7880456bf09f78e" exitCode=0 Dec 02 00:10:04 crc kubenswrapper[4856]: I1202 00:10:04.469734 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s2slc" event={"ID":"f4b04b8c-61cf-4bea-9eda-db2fc30e2247","Type":"ContainerDied","Data":"f3e62fc70b46677ac52469ea377fb1f95b2b2088516517e1d7880456bf09f78e"} Dec 02 00:10:05 crc kubenswrapper[4856]: I1202 00:10:05.150068 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s2slc" Dec 02 00:10:05 crc kubenswrapper[4856]: I1202 00:10:05.305381 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4b04b8c-61cf-4bea-9eda-db2fc30e2247-utilities\") pod \"f4b04b8c-61cf-4bea-9eda-db2fc30e2247\" (UID: \"f4b04b8c-61cf-4bea-9eda-db2fc30e2247\") " Dec 02 00:10:05 crc kubenswrapper[4856]: I1202 00:10:05.305772 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ljwxv\" (UniqueName: \"kubernetes.io/projected/f4b04b8c-61cf-4bea-9eda-db2fc30e2247-kube-api-access-ljwxv\") pod \"f4b04b8c-61cf-4bea-9eda-db2fc30e2247\" (UID: \"f4b04b8c-61cf-4bea-9eda-db2fc30e2247\") " Dec 02 00:10:05 crc kubenswrapper[4856]: I1202 00:10:05.306290 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4b04b8c-61cf-4bea-9eda-db2fc30e2247-utilities" (OuterVolumeSpecName: "utilities") pod "f4b04b8c-61cf-4bea-9eda-db2fc30e2247" (UID: "f4b04b8c-61cf-4bea-9eda-db2fc30e2247"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:10:05 crc kubenswrapper[4856]: I1202 00:10:05.306558 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4b04b8c-61cf-4bea-9eda-db2fc30e2247-catalog-content\") pod \"f4b04b8c-61cf-4bea-9eda-db2fc30e2247\" (UID: \"f4b04b8c-61cf-4bea-9eda-db2fc30e2247\") " Dec 02 00:10:05 crc kubenswrapper[4856]: I1202 00:10:05.306904 4856 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4b04b8c-61cf-4bea-9eda-db2fc30e2247-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:05 crc kubenswrapper[4856]: I1202 00:10:05.310902 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4b04b8c-61cf-4bea-9eda-db2fc30e2247-kube-api-access-ljwxv" (OuterVolumeSpecName: "kube-api-access-ljwxv") pod "f4b04b8c-61cf-4bea-9eda-db2fc30e2247" (UID: "f4b04b8c-61cf-4bea-9eda-db2fc30e2247"). InnerVolumeSpecName "kube-api-access-ljwxv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:10:05 crc kubenswrapper[4856]: I1202 00:10:05.405064 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4b04b8c-61cf-4bea-9eda-db2fc30e2247-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f4b04b8c-61cf-4bea-9eda-db2fc30e2247" (UID: "f4b04b8c-61cf-4bea-9eda-db2fc30e2247"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:10:05 crc kubenswrapper[4856]: I1202 00:10:05.407607 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ljwxv\" (UniqueName: \"kubernetes.io/projected/f4b04b8c-61cf-4bea-9eda-db2fc30e2247-kube-api-access-ljwxv\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:05 crc kubenswrapper[4856]: I1202 00:10:05.407639 4856 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4b04b8c-61cf-4bea-9eda-db2fc30e2247-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:05 crc kubenswrapper[4856]: I1202 00:10:05.476740 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s2slc" event={"ID":"f4b04b8c-61cf-4bea-9eda-db2fc30e2247","Type":"ContainerDied","Data":"6feecb096eb5c4e27c373f62ec41bc741a2590edf07d4fd3e456c7cbc3275f59"} Dec 02 00:10:05 crc kubenswrapper[4856]: I1202 00:10:05.476811 4856 scope.go:117] "RemoveContainer" containerID="f3e62fc70b46677ac52469ea377fb1f95b2b2088516517e1d7880456bf09f78e" Dec 02 00:10:05 crc kubenswrapper[4856]: I1202 00:10:05.476884 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-s2slc" Dec 02 00:10:05 crc kubenswrapper[4856]: I1202 00:10:05.497661 4856 scope.go:117] "RemoveContainer" containerID="03a517617af2dcc6d6912d82e6da7a06bb7ed6ac9fb28d3ad2bb68490687c612" Dec 02 00:10:05 crc kubenswrapper[4856]: I1202 00:10:05.516687 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-s2slc"] Dec 02 00:10:05 crc kubenswrapper[4856]: I1202 00:10:05.519946 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-s2slc"] Dec 02 00:10:05 crc kubenswrapper[4856]: I1202 00:10:05.523674 4856 scope.go:117] "RemoveContainer" containerID="4b79351a7b6ae82c8381d9bc51fdbd614746961e8d356e76028e68570494f2ac" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.708231 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff"] Dec 02 00:10:06 crc kubenswrapper[4856]: E1202 00:10:06.708476 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba515d9b-e262-408a-a28d-04c006e8a922" containerName="extract-utilities" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.708490 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba515d9b-e262-408a-a28d-04c006e8a922" containerName="extract-utilities" Dec 02 00:10:06 crc kubenswrapper[4856]: E1202 00:10:06.708501 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba515d9b-e262-408a-a28d-04c006e8a922" containerName="registry-server" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.708509 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba515d9b-e262-408a-a28d-04c006e8a922" containerName="registry-server" Dec 02 00:10:06 crc kubenswrapper[4856]: E1202 00:10:06.708522 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b04b8c-61cf-4bea-9eda-db2fc30e2247" containerName="extract-utilities" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.708529 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b04b8c-61cf-4bea-9eda-db2fc30e2247" containerName="extract-utilities" Dec 02 00:10:06 crc kubenswrapper[4856]: E1202 00:10:06.708540 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b04b8c-61cf-4bea-9eda-db2fc30e2247" containerName="extract-content" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.708547 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b04b8c-61cf-4bea-9eda-db2fc30e2247" containerName="extract-content" Dec 02 00:10:06 crc kubenswrapper[4856]: E1202 00:10:06.708558 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6" containerName="extract-utilities" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.708567 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6" containerName="extract-utilities" Dec 02 00:10:06 crc kubenswrapper[4856]: E1202 00:10:06.708577 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7763d51e-74d4-4bb9-b956-e33a31753604" containerName="extract-content" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.708584 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="7763d51e-74d4-4bb9-b956-e33a31753604" containerName="extract-content" Dec 02 00:10:06 crc kubenswrapper[4856]: E1202 00:10:06.708615 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6" containerName="registry-server" 
Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.708623 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6" containerName="registry-server" Dec 02 00:10:06 crc kubenswrapper[4856]: E1202 00:10:06.708633 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba515d9b-e262-408a-a28d-04c006e8a922" containerName="extract-content" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.708640 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba515d9b-e262-408a-a28d-04c006e8a922" containerName="extract-content" Dec 02 00:10:06 crc kubenswrapper[4856]: E1202 00:10:06.708649 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6" containerName="extract-content" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.708657 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6" containerName="extract-content" Dec 02 00:10:06 crc kubenswrapper[4856]: E1202 00:10:06.708673 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b04b8c-61cf-4bea-9eda-db2fc30e2247" containerName="registry-server" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.708681 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b04b8c-61cf-4bea-9eda-db2fc30e2247" containerName="registry-server" Dec 02 00:10:06 crc kubenswrapper[4856]: E1202 00:10:06.708695 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cc6ab38-8ed6-468a-864c-25354ae45707" containerName="oauth-openshift" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.708703 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cc6ab38-8ed6-468a-864c-25354ae45707" containerName="oauth-openshift" Dec 02 00:10:06 crc kubenswrapper[4856]: E1202 00:10:06.708713 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7763d51e-74d4-4bb9-b956-e33a31753604" containerName="registry-server" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.708720 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="7763d51e-74d4-4bb9-b956-e33a31753604" containerName="registry-server" Dec 02 00:10:06 crc kubenswrapper[4856]: E1202 00:10:06.708732 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7763d51e-74d4-4bb9-b956-e33a31753604" containerName="extract-utilities" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.708739 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="7763d51e-74d4-4bb9-b956-e33a31753604" containerName="extract-utilities" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.708862 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="7763d51e-74d4-4bb9-b956-e33a31753604" containerName="registry-server" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.708876 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b04b8c-61cf-4bea-9eda-db2fc30e2247" containerName="registry-server" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.708885 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="b47edae5-e6ef-4ac7-9dfd-9b8be6659aa6" containerName="registry-server" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.708903 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba515d9b-e262-408a-a28d-04c006e8a922" containerName="registry-server" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.708911 4856 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="6cc6ab38-8ed6-468a-864c-25354ae45707" containerName="oauth-openshift" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.709308 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.711574 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.711752 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.711836 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.711757 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.712141 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.712161 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.712481 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.712485 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.712575 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.712664 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.712759 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.717412 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.730540 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.731410 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.732196 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.735051 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff"] Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.823945 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lrs7\" (UniqueName: 
\"kubernetes.io/projected/7172a592-8483-473f-8cc8-23e4c068b95b-kube-api-access-5lrs7\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.823997 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.824025 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-system-service-ca\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.824049 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-system-session\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.824089 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-user-template-login\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.824118 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.824140 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-user-template-error\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.824168 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 
00:10:06.824189 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.824223 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.824352 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-system-router-certs\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.824385 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7172a592-8483-473f-8cc8-23e4c068b95b-audit-policies\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.824413 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7172a592-8483-473f-8cc8-23e4c068b95b-audit-dir\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.824441 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.925247 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-user-template-login\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.925318 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc 
kubenswrapper[4856]: I1202 00:10:06.925340 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-user-template-error\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.925362 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.925380 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.925407 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.925439 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-system-router-certs\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.925457 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7172a592-8483-473f-8cc8-23e4c068b95b-audit-policies\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.925476 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7172a592-8483-473f-8cc8-23e4c068b95b-audit-dir\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.925494 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.925515 4856 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-5lrs7\" (UniqueName: \"kubernetes.io/projected/7172a592-8483-473f-8cc8-23e4c068b95b-kube-api-access-5lrs7\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.925533 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.925550 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-system-service-ca\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.925565 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-system-session\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.926676 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7172a592-8483-473f-8cc8-23e4c068b95b-audit-policies\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.926855 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7172a592-8483-473f-8cc8-23e4c068b95b-audit-dir\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.927088 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.927368 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-system-service-ca\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.927747 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.930826 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.930831 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.930899 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-system-session\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.931033 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-user-template-error\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.931301 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.931803 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-user-template-login\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.939283 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.939565 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/7172a592-8483-473f-8cc8-23e4c068b95b-v4-0-config-system-router-certs\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:06 crc kubenswrapper[4856]: I1202 00:10:06.944837 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lrs7\" (UniqueName: \"kubernetes.io/projected/7172a592-8483-473f-8cc8-23e4c068b95b-kube-api-access-5lrs7\") pod \"oauth-openshift-5cf8f9f8d-ftrff\" (UID: \"7172a592-8483-473f-8cc8-23e4c068b95b\") " pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:07 crc kubenswrapper[4856]: I1202 00:10:07.031643 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:07 crc kubenswrapper[4856]: I1202 00:10:07.197765 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff"] Dec 02 00:10:07 crc kubenswrapper[4856]: I1202 00:10:07.260505 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b04b8c-61cf-4bea-9eda-db2fc30e2247" path="/var/lib/kubelet/pods/f4b04b8c-61cf-4bea-9eda-db2fc30e2247/volumes" Dec 02 00:10:07 crc kubenswrapper[4856]: I1202 00:10:07.490758 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" event={"ID":"7172a592-8483-473f-8cc8-23e4c068b95b","Type":"ContainerStarted","Data":"b5dcb94000a9e884b38705fa42982bca0e979c3bc74949ef6ec193306c4a81d8"} Dec 02 00:10:07 crc kubenswrapper[4856]: I1202 00:10:07.491026 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" event={"ID":"7172a592-8483-473f-8cc8-23e4c068b95b","Type":"ContainerStarted","Data":"6a1016c6a642ec03746331833230d11e539dd283b6c1d2c4dbd5decca0dcc11c"} Dec 02 00:10:07 crc kubenswrapper[4856]: I1202 00:10:07.491450 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:07 crc kubenswrapper[4856]: I1202 00:10:07.509918 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" podStartSLOduration=34.509904295 podStartE2EDuration="34.509904295s" podCreationTimestamp="2025-12-02 00:09:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:10:07.508737675 +0000 UTC m=+234.535105699" watchObservedRunningTime="2025-12-02 00:10:07.509904295 +0000 UTC m=+234.536272289" Dec 02 00:10:07 crc kubenswrapper[4856]: I1202 00:10:07.986508 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-5cf8f9f8d-ftrff" Dec 02 00:10:11 crc kubenswrapper[4856]: E1202 00:10:11.116899 4856 file.go:109] "Unable to process watch event" err="can't process config file \"/etc/kubernetes/manifests/kube-apiserver-startup-monitor-pod.yaml\": /etc/kubernetes/manifests/kube-apiserver-startup-monitor-pod.yaml: couldn't parse as pod(Object 'Kind' is missing in 'null'), please check config file" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.119380 4856 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 
00:10:11.120153 4856 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.120419 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08" gracePeriod=15 Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.120483 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.120483 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99" gracePeriod=15 Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.120555 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05" gracePeriod=15 Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.120581 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927" gracePeriod=15 Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.120512 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f" gracePeriod=15 Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.123238 4856 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 02 00:10:11 crc kubenswrapper[4856]: E1202 00:10:11.123439 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.123455 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 02 00:10:11 crc kubenswrapper[4856]: E1202 00:10:11.123465 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.123471 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 02 00:10:11 crc kubenswrapper[4856]: E1202 00:10:11.123481 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.123487 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-check-endpoints" Dec 02 00:10:11 crc kubenswrapper[4856]: E1202 00:10:11.123495 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.123501 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 02 00:10:11 crc kubenswrapper[4856]: E1202 00:10:11.123514 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.123519 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 02 00:10:11 crc kubenswrapper[4856]: E1202 00:10:11.123527 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.123534 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.123634 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.123645 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.123655 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.123663 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.123670 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.123680 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 00:10:11 crc kubenswrapper[4856]: E1202 00:10:11.123775 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.123783 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.159068 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.275328 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.275389 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.275424 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.275445 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.275473 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.275489 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.275507 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.275522 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.376651 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.376712 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod 
\"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.376734 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.376761 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.376778 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.376796 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.376811 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.376843 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.376905 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.376944 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.376967 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: 
\"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.376989 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.377013 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.377038 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.377061 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.377085 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.452439 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 00:10:11 crc kubenswrapper[4856]: W1202 00:10:11.470177 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-0315ab5af0f830497f7b291d2662e0b1664ae0499f6e9823eae665c55f469dd0 WatchSource:0}: Error finding container 0315ab5af0f830497f7b291d2662e0b1664ae0499f6e9823eae665c55f469dd0: Status 404 returned error can't find the container with id 0315ab5af0f830497f7b291d2662e0b1664ae0499f6e9823eae665c55f469dd0 Dec 02 00:10:11 crc kubenswrapper[4856]: E1202 00:10:11.475275 4856 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.222:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187d3d774554c972 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-02 00:10:11.472689522 +0000 UTC m=+238.499057526,LastTimestamp:2025-12-02 00:10:11.472689522 +0000 UTC m=+238.499057526,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.528996 4856 generic.go:334] "Generic (PLEG): container finished" podID="77c28b4e-13c3-403e-8408-fd51e65a1f58" containerID="d0a861d8427002c944a75b34065e0c8b122aaa111637fc5c5a603c6efba20f05" exitCode=0 Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.529060 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"77c28b4e-13c3-403e-8408-fd51e65a1f58","Type":"ContainerDied","Data":"d0a861d8427002c944a75b34065e0c8b122aaa111637fc5c5a603c6efba20f05"} Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.529856 4856 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.530406 4856 status_manager.go:851] "Failed to get status for pod" podUID="77c28b4e-13c3-403e-8408-fd51e65a1f58" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.530713 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"0315ab5af0f830497f7b291d2662e0b1664ae0499f6e9823eae665c55f469dd0"} Dec 02 
00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.534177 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.538999 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.539735 4856 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927" exitCode=0 Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.539759 4856 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99" exitCode=0 Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.539772 4856 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f" exitCode=0 Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.539783 4856 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05" exitCode=2 Dec 02 00:10:11 crc kubenswrapper[4856]: I1202 00:10:11.539824 4856 scope.go:117] "RemoveContainer" containerID="4fa200e47ad44bd7743a33301e0cfad546775b1941520986601ee7fa3a7dedd5" Dec 02 00:10:12 crc kubenswrapper[4856]: I1202 00:10:12.551361 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"3ee5d25084eae3612f87882212629a284ab805dc7adf692a4474b5bdc8cae138"} Dec 02 00:10:12 crc kubenswrapper[4856]: I1202 00:10:12.554072 4856 status_manager.go:851] "Failed to get status for pod" podUID="77c28b4e-13c3-403e-8408-fd51e65a1f58" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:12 crc kubenswrapper[4856]: I1202 00:10:12.554793 4856 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:12 crc kubenswrapper[4856]: I1202 00:10:12.556006 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 02 00:10:12 crc kubenswrapper[4856]: I1202 00:10:12.802889 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 02 00:10:12 crc kubenswrapper[4856]: I1202 00:10:12.803749 4856 status_manager.go:851] "Failed to get status for pod" podUID="77c28b4e-13c3-403e-8408-fd51e65a1f58" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:12 crc kubenswrapper[4856]: I1202 00:10:12.804410 4856 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:12 crc kubenswrapper[4856]: I1202 00:10:12.999085 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/77c28b4e-13c3-403e-8408-fd51e65a1f58-var-lock\") pod \"77c28b4e-13c3-403e-8408-fd51e65a1f58\" (UID: \"77c28b4e-13c3-403e-8408-fd51e65a1f58\") " Dec 02 00:10:12 crc kubenswrapper[4856]: I1202 00:10:12.999251 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/77c28b4e-13c3-403e-8408-fd51e65a1f58-var-lock" (OuterVolumeSpecName: "var-lock") pod "77c28b4e-13c3-403e-8408-fd51e65a1f58" (UID: "77c28b4e-13c3-403e-8408-fd51e65a1f58"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:10:12 crc kubenswrapper[4856]: I1202 00:10:12.999554 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/77c28b4e-13c3-403e-8408-fd51e65a1f58-kube-api-access\") pod \"77c28b4e-13c3-403e-8408-fd51e65a1f58\" (UID: \"77c28b4e-13c3-403e-8408-fd51e65a1f58\") " Dec 02 00:10:12 crc kubenswrapper[4856]: I1202 00:10:12.999715 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/77c28b4e-13c3-403e-8408-fd51e65a1f58-kubelet-dir\") pod \"77c28b4e-13c3-403e-8408-fd51e65a1f58\" (UID: \"77c28b4e-13c3-403e-8408-fd51e65a1f58\") " Dec 02 00:10:12 crc kubenswrapper[4856]: I1202 00:10:12.999772 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/77c28b4e-13c3-403e-8408-fd51e65a1f58-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "77c28b4e-13c3-403e-8408-fd51e65a1f58" (UID: "77c28b4e-13c3-403e-8408-fd51e65a1f58"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.000389 4856 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/77c28b4e-13c3-403e-8408-fd51e65a1f58-var-lock\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.000471 4856 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/77c28b4e-13c3-403e-8408-fd51e65a1f58-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.004698 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77c28b4e-13c3-403e-8408-fd51e65a1f58-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "77c28b4e-13c3-403e-8408-fd51e65a1f58" (UID: "77c28b4e-13c3-403e-8408-fd51e65a1f58"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.101260 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/77c28b4e-13c3-403e-8408-fd51e65a1f58-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.261863 4856 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.263162 4856 status_manager.go:851] "Failed to get status for pod" podUID="77c28b4e-13c3-403e-8408-fd51e65a1f58" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.484302 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.485145 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.485696 4856 status_manager.go:851] "Failed to get status for pod" podUID="77c28b4e-13c3-403e-8408-fd51e65a1f58" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.487728 4856 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.487955 4856 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.567865 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"77c28b4e-13c3-403e-8408-fd51e65a1f58","Type":"ContainerDied","Data":"1f41c8936e470e1e65a9ae4201c580534f65d62215353bd1bc633d87e943c415"} Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.568845 4856 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1f41c8936e470e1e65a9ae4201c580534f65d62215353bd1bc633d87e943c415" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.568050 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.570960 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.571602 4856 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08" exitCode=0 Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.571770 4856 scope.go:117] "RemoveContainer" containerID="d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.571786 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.572792 4856 status_manager.go:851] "Failed to get status for pod" podUID="77c28b4e-13c3-403e-8408-fd51e65a1f58" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.572981 4856 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.573200 4856 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.595319 4856 scope.go:117] "RemoveContainer" containerID="64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.612914 4856 scope.go:117] "RemoveContainer" containerID="2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.617272 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.617328 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.617401 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.617473 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.617498 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.617614 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.617734 4856 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.617753 4856 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.617765 4856 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.630292 4856 scope.go:117] "RemoveContainer" containerID="f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.654375 4856 scope.go:117] "RemoveContainer" containerID="d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.677371 4856 scope.go:117] "RemoveContainer" containerID="524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.703925 4856 scope.go:117] "RemoveContainer" containerID="d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927" Dec 02 00:10:13 crc kubenswrapper[4856]: E1202 00:10:13.704301 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\": container with ID starting with d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927 not found: ID does not exist" containerID="d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.704356 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927"} err="failed to get container status \"d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\": rpc error: code = NotFound desc = could not find container \"d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927\": container with ID starting with d00f46202b4ba157a5c01504929bbeffa79479d061af4d83980724f5669a9927 not found: ID does not exist" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.704379 4856 scope.go:117] "RemoveContainer" containerID="64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99" Dec 02 00:10:13 crc kubenswrapper[4856]: E1202 00:10:13.704634 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\": container with ID starting with 64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99 not 
found: ID does not exist" containerID="64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.704659 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99"} err="failed to get container status \"64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\": rpc error: code = NotFound desc = could not find container \"64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99\": container with ID starting with 64c598aacd841ed9632ebc8b403325c1db1b6379ddf370ad069c6310d9711e99 not found: ID does not exist" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.704695 4856 scope.go:117] "RemoveContainer" containerID="2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f" Dec 02 00:10:13 crc kubenswrapper[4856]: E1202 00:10:13.704929 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\": container with ID starting with 2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f not found: ID does not exist" containerID="2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.704970 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f"} err="failed to get container status \"2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\": rpc error: code = NotFound desc = could not find container \"2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f\": container with ID starting with 2ad8309d7bc559a2514aa18face2a9ff431981fde77ce02910aa6a56eac3247f not found: ID does not exist" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.704990 4856 scope.go:117] "RemoveContainer" containerID="f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05" Dec 02 00:10:13 crc kubenswrapper[4856]: E1202 00:10:13.705231 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\": container with ID starting with f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05 not found: ID does not exist" containerID="f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.705277 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05"} err="failed to get container status \"f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\": rpc error: code = NotFound desc = could not find container \"f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05\": container with ID starting with f2d29b53e392bdad181558e831497879c405acc472ace20f7858d327231afe05 not found: ID does not exist" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.705295 4856 scope.go:117] "RemoveContainer" containerID="d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08" Dec 02 00:10:13 crc kubenswrapper[4856]: E1202 00:10:13.705776 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\": container with ID starting with d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08 not found: ID does not exist" containerID="d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.705816 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08"} err="failed to get container status \"d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\": rpc error: code = NotFound desc = could not find container \"d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08\": container with ID starting with d2e475fd72d490eda19daaf40a24b6efa214df13cf2d786f7b921c3201c71c08 not found: ID does not exist" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.705831 4856 scope.go:117] "RemoveContainer" containerID="524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d" Dec 02 00:10:13 crc kubenswrapper[4856]: E1202 00:10:13.706032 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\": container with ID starting with 524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d not found: ID does not exist" containerID="524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.706089 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d"} err="failed to get container status \"524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\": rpc error: code = NotFound desc = could not find container \"524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d\": container with ID starting with 524c50e38c38b1dc22e4dbcc66d146d9a1ac456f006241fcb73813aa97a4d09d not found: ID does not exist" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.887290 4856 status_manager.go:851] "Failed to get status for pod" podUID="77c28b4e-13c3-403e-8408-fd51e65a1f58" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.888150 4856 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:13 crc kubenswrapper[4856]: I1202 00:10:13.888674 4856 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:15 crc kubenswrapper[4856]: I1202 00:10:15.260143 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 02 00:10:15 
crc kubenswrapper[4856]: E1202 00:10:15.343637 4856 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-pod77c28b4e_13c3_403e_8408_fd51e65a1f58.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-pod77c28b4e_13c3_403e_8408_fd51e65a1f58.slice/crio-1f41c8936e470e1e65a9ae4201c580534f65d62215353bd1bc633d87e943c415\": RecentStats: unable to find data in memory cache]" Dec 02 00:10:16 crc kubenswrapper[4856]: E1202 00:10:16.572464 4856 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.222:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187d3d774554c972 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-02 00:10:11.472689522 +0000 UTC m=+238.499057526,LastTimestamp:2025-12-02 00:10:11.472689522 +0000 UTC m=+238.499057526,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 02 00:10:17 crc kubenswrapper[4856]: E1202 00:10:17.949643 4856 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:17 crc kubenswrapper[4856]: E1202 00:10:17.950301 4856 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:17 crc kubenswrapper[4856]: E1202 00:10:17.950798 4856 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:17 crc kubenswrapper[4856]: E1202 00:10:17.951332 4856 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:17 crc kubenswrapper[4856]: E1202 00:10:17.951694 4856 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:17 crc kubenswrapper[4856]: I1202 00:10:17.951736 4856 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 02 00:10:17 crc kubenswrapper[4856]: E1202 00:10:17.952062 4856 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.222:6443: connect: connection refused" interval="200ms" Dec 02 00:10:18 crc kubenswrapper[4856]: E1202 00:10:18.153766 4856 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.222:6443: connect: connection refused" interval="400ms" Dec 02 00:10:18 crc kubenswrapper[4856]: E1202 00:10:18.554770 4856 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.222:6443: connect: connection refused" interval="800ms" Dec 02 00:10:19 crc kubenswrapper[4856]: E1202 00:10:19.356462 4856 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.222:6443: connect: connection refused" interval="1.6s" Dec 02 00:10:20 crc kubenswrapper[4856]: E1202 00:10:20.956979 4856 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.222:6443: connect: connection refused" interval="3.2s" Dec 02 00:10:23 crc kubenswrapper[4856]: I1202 00:10:23.258983 4856 status_manager.go:851] "Failed to get status for pod" podUID="77c28b4e-13c3-403e-8408-fd51e65a1f58" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:23 crc kubenswrapper[4856]: I1202 00:10:23.259385 4856 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:24 crc kubenswrapper[4856]: E1202 00:10:24.175792 4856 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.222:6443: connect: connection refused" interval="6.4s" Dec 02 00:10:24 crc kubenswrapper[4856]: I1202 00:10:24.252100 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:10:24 crc kubenswrapper[4856]: I1202 00:10:24.252778 4856 status_manager.go:851] "Failed to get status for pod" podUID="77c28b4e-13c3-403e-8408-fd51e65a1f58" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:24 crc kubenswrapper[4856]: I1202 00:10:24.253340 4856 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:24 crc kubenswrapper[4856]: I1202 00:10:24.266206 4856 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="acd644f8-9ce7-42e4-af58-11bb128c9974" Dec 02 00:10:24 crc kubenswrapper[4856]: I1202 00:10:24.266251 4856 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="acd644f8-9ce7-42e4-af58-11bb128c9974" Dec 02 00:10:24 crc kubenswrapper[4856]: E1202 00:10:24.267244 4856 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:10:24 crc kubenswrapper[4856]: I1202 00:10:24.267777 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:10:24 crc kubenswrapper[4856]: I1202 00:10:24.634868 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"8a510c8805b1b972498d0851cc25d2190a474bf0beec36949fb8b26eb3c75c4e"} Dec 02 00:10:25 crc kubenswrapper[4856]: E1202 00:10:25.271298 4856 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.222:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" volumeName="registry-storage" Dec 02 00:10:25 crc kubenswrapper[4856]: E1202 00:10:25.456928 4856 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-pod77c28b4e_13c3_403e_8408_fd51e65a1f58.slice/crio-1f41c8936e470e1e65a9ae4201c580534f65d62215353bd1bc633d87e943c415\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-pod77c28b4e_13c3_403e_8408_fd51e65a1f58.slice\": RecentStats: unable to find data in memory cache]" Dec 02 00:10:25 crc kubenswrapper[4856]: I1202 00:10:25.643148 4856 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="0d3d2adc5f23be23dcff7886a9b0b8f191c81c15f97f96a7fe11cf8d8692df13" exitCode=0 Dec 02 00:10:25 crc kubenswrapper[4856]: I1202 00:10:25.643493 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"0d3d2adc5f23be23dcff7886a9b0b8f191c81c15f97f96a7fe11cf8d8692df13"} Dec 02 00:10:25 crc kubenswrapper[4856]: I1202 00:10:25.643745 4856 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="acd644f8-9ce7-42e4-af58-11bb128c9974" Dec 02 00:10:25 crc kubenswrapper[4856]: I1202 00:10:25.643795 4856 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="acd644f8-9ce7-42e4-af58-11bb128c9974" Dec 02 00:10:25 crc kubenswrapper[4856]: I1202 00:10:25.644009 4856 status_manager.go:851] "Failed to get status for pod" podUID="77c28b4e-13c3-403e-8408-fd51e65a1f58" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:25 crc kubenswrapper[4856]: I1202 00:10:25.644400 4856 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:25 crc kubenswrapper[4856]: E1202 00:10:25.644475 4856 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:10:25 crc kubenswrapper[4856]: I1202 00:10:25.647547 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 02 00:10:25 crc kubenswrapper[4856]: I1202 00:10:25.647772 4856 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9" exitCode=1 Dec 02 00:10:25 crc kubenswrapper[4856]: I1202 00:10:25.647867 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9"} Dec 02 00:10:25 crc kubenswrapper[4856]: I1202 00:10:25.648243 4856 scope.go:117] "RemoveContainer" containerID="e06144fc92f1f9f189b941129d2f4b481c6d3350a6ca0d7a1af5d69d48d07df9" Dec 02 00:10:25 crc kubenswrapper[4856]: I1202 00:10:25.648702 4856 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:25 crc kubenswrapper[4856]: I1202 00:10:25.649028 4856 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": 
dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:25 crc kubenswrapper[4856]: I1202 00:10:25.649453 4856 status_manager.go:851] "Failed to get status for pod" podUID="77c28b4e-13c3-403e-8408-fd51e65a1f58" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.222:6443: connect: connection refused" Dec 02 00:10:26 crc kubenswrapper[4856]: I1202 00:10:26.659019 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"89dd50411b95b9d54ef8eb21f945d9c431956e9b716162c3de29c178f84d6181"} Dec 02 00:10:26 crc kubenswrapper[4856]: I1202 00:10:26.659061 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"7b540b59faae95faf7e488b71cec23a10609865d2bdbee0946cf1a72d9b39fb0"} Dec 02 00:10:26 crc kubenswrapper[4856]: I1202 00:10:26.664677 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 02 00:10:26 crc kubenswrapper[4856]: I1202 00:10:26.664733 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"18b3624492210b86625fb98de513d630910c20e508b92883e1b0de824627e248"} Dec 02 00:10:27 crc kubenswrapper[4856]: I1202 00:10:27.674375 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"962db6bdb4b2200b7da635c80eba2ea56b4e1164a7f4dd4bc6cf8ef9a6d67bf8"} Dec 02 00:10:27 crc kubenswrapper[4856]: I1202 00:10:27.674423 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"74e7be486b425c2e7768538b22e0ddd5dd37dce0e7acd1e01c7d4b4eda0d8b46"} Dec 02 00:10:27 crc kubenswrapper[4856]: I1202 00:10:27.674438 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"5c9a013540f19090e3a9a9d0197558d7708fb6b7c0174d5b89bd608d89bcf3e1"} Dec 02 00:10:27 crc kubenswrapper[4856]: I1202 00:10:27.674607 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:10:27 crc kubenswrapper[4856]: I1202 00:10:27.674732 4856 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="acd644f8-9ce7-42e4-af58-11bb128c9974" Dec 02 00:10:27 crc kubenswrapper[4856]: I1202 00:10:27.674761 4856 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="acd644f8-9ce7-42e4-af58-11bb128c9974" Dec 02 00:10:29 crc kubenswrapper[4856]: I1202 00:10:29.268442 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:10:29 crc kubenswrapper[4856]: I1202 00:10:29.268727 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:10:29 crc kubenswrapper[4856]: I1202 00:10:29.276665 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:10:29 crc kubenswrapper[4856]: I1202 00:10:29.436658 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 00:10:30 crc kubenswrapper[4856]: I1202 00:10:30.423705 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 00:10:30 crc kubenswrapper[4856]: I1202 00:10:30.427274 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 00:10:32 crc kubenswrapper[4856]: I1202 00:10:32.704165 4856 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:10:33 crc kubenswrapper[4856]: I1202 00:10:33.268729 4856 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="3bea8c7a-b466-499b-bc56-22c5c8fde65d" Dec 02 00:10:33 crc kubenswrapper[4856]: I1202 00:10:33.710972 4856 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="acd644f8-9ce7-42e4-af58-11bb128c9974" Dec 02 00:10:33 crc kubenswrapper[4856]: I1202 00:10:33.711017 4856 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="acd644f8-9ce7-42e4-af58-11bb128c9974" Dec 02 00:10:33 crc kubenswrapper[4856]: I1202 00:10:33.720184 4856 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="3bea8c7a-b466-499b-bc56-22c5c8fde65d" Dec 02 00:10:33 crc kubenswrapper[4856]: I1202 00:10:33.720839 4856 status_manager.go:308] "Container readiness changed before pod has synced" pod="openshift-kube-apiserver/kube-apiserver-crc" containerID="cri-o://7b540b59faae95faf7e488b71cec23a10609865d2bdbee0946cf1a72d9b39fb0" Dec 02 00:10:33 crc kubenswrapper[4856]: I1202 00:10:33.720871 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:10:34 crc kubenswrapper[4856]: I1202 00:10:34.716013 4856 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="acd644f8-9ce7-42e4-af58-11bb128c9974" Dec 02 00:10:34 crc kubenswrapper[4856]: I1202 00:10:34.716427 4856 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="acd644f8-9ce7-42e4-af58-11bb128c9974" Dec 02 00:10:34 crc kubenswrapper[4856]: I1202 00:10:34.719733 4856 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="3bea8c7a-b466-499b-bc56-22c5c8fde65d" Dec 02 00:10:35 crc kubenswrapper[4856]: E1202 00:10:35.596613 4856 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-pod77c28b4e_13c3_403e_8408_fd51e65a1f58.slice/crio-1f41c8936e470e1e65a9ae4201c580534f65d62215353bd1bc633d87e943c415\": RecentStats: unable to find 
data in memory cache], [\"/kubepods.slice/kubepods-pod77c28b4e_13c3_403e_8408_fd51e65a1f58.slice\": RecentStats: unable to find data in memory cache]" Dec 02 00:10:39 crc kubenswrapper[4856]: I1202 00:10:39.444073 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 00:10:42 crc kubenswrapper[4856]: I1202 00:10:42.954326 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 02 00:10:43 crc kubenswrapper[4856]: I1202 00:10:43.080430 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 02 00:10:43 crc kubenswrapper[4856]: I1202 00:10:43.511473 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 02 00:10:43 crc kubenswrapper[4856]: I1202 00:10:43.593876 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 02 00:10:43 crc kubenswrapper[4856]: I1202 00:10:43.686564 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 02 00:10:43 crc kubenswrapper[4856]: I1202 00:10:43.759778 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 02 00:10:43 crc kubenswrapper[4856]: I1202 00:10:43.984917 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 02 00:10:44 crc kubenswrapper[4856]: I1202 00:10:44.342315 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 02 00:10:44 crc kubenswrapper[4856]: I1202 00:10:44.642933 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 02 00:10:44 crc kubenswrapper[4856]: I1202 00:10:44.685745 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 02 00:10:44 crc kubenswrapper[4856]: I1202 00:10:44.721263 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 02 00:10:44 crc kubenswrapper[4856]: I1202 00:10:44.910045 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 02 00:10:44 crc kubenswrapper[4856]: I1202 00:10:44.959050 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 02 00:10:44 crc kubenswrapper[4856]: I1202 00:10:44.994408 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 02 00:10:45 crc kubenswrapper[4856]: I1202 00:10:45.039400 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 02 00:10:45 crc kubenswrapper[4856]: I1202 00:10:45.187147 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 02 00:10:45 crc kubenswrapper[4856]: I1202 00:10:45.322788 4856 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 02 00:10:45 crc kubenswrapper[4856]: I1202 00:10:45.347193 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 02 00:10:45 crc kubenswrapper[4856]: E1202 00:10:45.713385 4856 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-pod77c28b4e_13c3_403e_8408_fd51e65a1f58.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-pod77c28b4e_13c3_403e_8408_fd51e65a1f58.slice/crio-1f41c8936e470e1e65a9ae4201c580534f65d62215353bd1bc633d87e943c415\": RecentStats: unable to find data in memory cache]" Dec 02 00:10:45 crc kubenswrapper[4856]: I1202 00:10:45.733619 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 02 00:10:45 crc kubenswrapper[4856]: I1202 00:10:45.768661 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 02 00:10:45 crc kubenswrapper[4856]: I1202 00:10:45.894891 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 02 00:10:45 crc kubenswrapper[4856]: I1202 00:10:45.927469 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 02 00:10:45 crc kubenswrapper[4856]: I1202 00:10:45.960683 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 02 00:10:45 crc kubenswrapper[4856]: I1202 00:10:45.991397 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 02 00:10:46 crc kubenswrapper[4856]: I1202 00:10:46.052335 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 02 00:10:46 crc kubenswrapper[4856]: I1202 00:10:46.245863 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 02 00:10:46 crc kubenswrapper[4856]: I1202 00:10:46.292832 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 02 00:10:46 crc kubenswrapper[4856]: I1202 00:10:46.300199 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 02 00:10:46 crc kubenswrapper[4856]: I1202 00:10:46.349895 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 02 00:10:46 crc kubenswrapper[4856]: I1202 00:10:46.409170 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 02 00:10:46 crc kubenswrapper[4856]: I1202 00:10:46.436189 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 02 00:10:46 crc kubenswrapper[4856]: I1202 00:10:46.442479 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 02 00:10:46 crc kubenswrapper[4856]: I1202 00:10:46.467807 4856 reflector.go:368] Caches populated 
for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 02 00:10:46 crc kubenswrapper[4856]: I1202 00:10:46.554238 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 02 00:10:46 crc kubenswrapper[4856]: I1202 00:10:46.893432 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 02 00:10:46 crc kubenswrapper[4856]: I1202 00:10:46.931915 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 02 00:10:46 crc kubenswrapper[4856]: I1202 00:10:46.991776 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 02 00:10:47 crc kubenswrapper[4856]: I1202 00:10:47.085445 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 02 00:10:47 crc kubenswrapper[4856]: I1202 00:10:47.230707 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 02 00:10:47 crc kubenswrapper[4856]: I1202 00:10:47.268794 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 02 00:10:47 crc kubenswrapper[4856]: I1202 00:10:47.287834 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 02 00:10:47 crc kubenswrapper[4856]: I1202 00:10:47.304181 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 02 00:10:47 crc kubenswrapper[4856]: I1202 00:10:47.369658 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 02 00:10:47 crc kubenswrapper[4856]: I1202 00:10:47.458059 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 02 00:10:47 crc kubenswrapper[4856]: I1202 00:10:47.465544 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 02 00:10:47 crc kubenswrapper[4856]: I1202 00:10:47.466464 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 02 00:10:47 crc kubenswrapper[4856]: I1202 00:10:47.492577 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 02 00:10:47 crc kubenswrapper[4856]: I1202 00:10:47.537315 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 02 00:10:47 crc kubenswrapper[4856]: I1202 00:10:47.607534 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 02 00:10:47 crc kubenswrapper[4856]: I1202 00:10:47.639107 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 02 00:10:47 crc kubenswrapper[4856]: I1202 00:10:47.697720 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 02 00:10:47 crc kubenswrapper[4856]: I1202 00:10:47.726308 4856 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 02 00:10:47 crc kubenswrapper[4856]: I1202 00:10:47.738964 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 02 00:10:47 crc kubenswrapper[4856]: I1202 00:10:47.753854 4856 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 02 00:10:47 crc kubenswrapper[4856]: I1202 00:10:47.854812 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 02 00:10:47 crc kubenswrapper[4856]: I1202 00:10:47.937438 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 02 00:10:47 crc kubenswrapper[4856]: I1202 00:10:47.949130 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 02 00:10:47 crc kubenswrapper[4856]: I1202 00:10:47.964116 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 02 00:10:48 crc kubenswrapper[4856]: I1202 00:10:48.033416 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 02 00:10:48 crc kubenswrapper[4856]: I1202 00:10:48.039320 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 02 00:10:48 crc kubenswrapper[4856]: I1202 00:10:48.149577 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 02 00:10:48 crc kubenswrapper[4856]: I1202 00:10:48.153642 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 02 00:10:48 crc kubenswrapper[4856]: I1202 00:10:48.158295 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 02 00:10:48 crc kubenswrapper[4856]: I1202 00:10:48.274166 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 02 00:10:48 crc kubenswrapper[4856]: I1202 00:10:48.301899 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 02 00:10:48 crc kubenswrapper[4856]: I1202 00:10:48.314463 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 02 00:10:48 crc kubenswrapper[4856]: I1202 00:10:48.354426 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 02 00:10:48 crc kubenswrapper[4856]: I1202 00:10:48.473185 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 02 00:10:48 crc kubenswrapper[4856]: I1202 00:10:48.505913 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 02 00:10:48 crc kubenswrapper[4856]: I1202 00:10:48.524972 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 02 00:10:48 crc kubenswrapper[4856]: I1202 00:10:48.674692 4856 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-controller-manager"/"config" Dec 02 00:10:48 crc kubenswrapper[4856]: I1202 00:10:48.692481 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 02 00:10:48 crc kubenswrapper[4856]: I1202 00:10:48.693899 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 02 00:10:48 crc kubenswrapper[4856]: I1202 00:10:48.738476 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 02 00:10:48 crc kubenswrapper[4856]: I1202 00:10:48.763444 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 02 00:10:48 crc kubenswrapper[4856]: I1202 00:10:48.800567 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 02 00:10:48 crc kubenswrapper[4856]: I1202 00:10:48.822258 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 02 00:10:48 crc kubenswrapper[4856]: I1202 00:10:48.841762 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 02 00:10:48 crc kubenswrapper[4856]: I1202 00:10:48.842133 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 02 00:10:48 crc kubenswrapper[4856]: I1202 00:10:48.864177 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 02 00:10:48 crc kubenswrapper[4856]: I1202 00:10:48.968175 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 02 00:10:48 crc kubenswrapper[4856]: I1202 00:10:48.982399 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 02 00:10:49 crc kubenswrapper[4856]: I1202 00:10:49.026777 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 02 00:10:49 crc kubenswrapper[4856]: I1202 00:10:49.029055 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 02 00:10:49 crc kubenswrapper[4856]: I1202 00:10:49.037639 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 02 00:10:49 crc kubenswrapper[4856]: I1202 00:10:49.083451 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 02 00:10:49 crc kubenswrapper[4856]: I1202 00:10:49.090427 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 02 00:10:49 crc kubenswrapper[4856]: I1202 00:10:49.143075 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 02 00:10:49 crc kubenswrapper[4856]: I1202 00:10:49.150970 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 02 00:10:49 crc kubenswrapper[4856]: I1202 00:10:49.314077 
4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 02 00:10:49 crc kubenswrapper[4856]: I1202 00:10:49.322832 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 02 00:10:49 crc kubenswrapper[4856]: I1202 00:10:49.343461 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 02 00:10:49 crc kubenswrapper[4856]: I1202 00:10:49.385570 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 02 00:10:49 crc kubenswrapper[4856]: I1202 00:10:49.396419 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 02 00:10:49 crc kubenswrapper[4856]: I1202 00:10:49.434082 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 02 00:10:49 crc kubenswrapper[4856]: I1202 00:10:49.435340 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 02 00:10:49 crc kubenswrapper[4856]: I1202 00:10:49.604875 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 02 00:10:49 crc kubenswrapper[4856]: I1202 00:10:49.647850 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 02 00:10:49 crc kubenswrapper[4856]: I1202 00:10:49.648007 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 02 00:10:49 crc kubenswrapper[4856]: I1202 00:10:49.654542 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 02 00:10:49 crc kubenswrapper[4856]: I1202 00:10:49.806420 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 02 00:10:49 crc kubenswrapper[4856]: I1202 00:10:49.817570 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 02 00:10:49 crc kubenswrapper[4856]: I1202 00:10:49.904848 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 02 00:10:49 crc kubenswrapper[4856]: I1202 00:10:49.952829 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 02 00:10:50 crc kubenswrapper[4856]: I1202 00:10:50.010720 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 02 00:10:50 crc kubenswrapper[4856]: I1202 00:10:50.033062 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 02 00:10:50 crc kubenswrapper[4856]: I1202 00:10:50.060399 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 02 00:10:50 crc kubenswrapper[4856]: I1202 00:10:50.152813 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 02 00:10:50 crc kubenswrapper[4856]: I1202 
00:10:50.190497 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 02 00:10:50 crc kubenswrapper[4856]: I1202 00:10:50.193977 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 02 00:10:50 crc kubenswrapper[4856]: I1202 00:10:50.208324 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 02 00:10:50 crc kubenswrapper[4856]: I1202 00:10:50.209690 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 02 00:10:50 crc kubenswrapper[4856]: I1202 00:10:50.228488 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 02 00:10:50 crc kubenswrapper[4856]: I1202 00:10:50.241477 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 02 00:10:50 crc kubenswrapper[4856]: I1202 00:10:50.367554 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 02 00:10:50 crc kubenswrapper[4856]: I1202 00:10:50.525955 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 02 00:10:50 crc kubenswrapper[4856]: I1202 00:10:50.590173 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 02 00:10:50 crc kubenswrapper[4856]: I1202 00:10:50.593853 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 02 00:10:50 crc kubenswrapper[4856]: I1202 00:10:50.606780 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 02 00:10:50 crc kubenswrapper[4856]: I1202 00:10:50.648555 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 02 00:10:50 crc kubenswrapper[4856]: I1202 00:10:50.660395 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 02 00:10:50 crc kubenswrapper[4856]: I1202 00:10:50.701040 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 02 00:10:50 crc kubenswrapper[4856]: I1202 00:10:50.715177 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 02 00:10:50 crc kubenswrapper[4856]: I1202 00:10:50.727705 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 02 00:10:50 crc kubenswrapper[4856]: I1202 00:10:50.768659 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 02 00:10:50 crc kubenswrapper[4856]: I1202 00:10:50.780152 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 02 00:10:50 crc kubenswrapper[4856]: I1202 00:10:50.787203 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 02 00:10:50 crc 
kubenswrapper[4856]: I1202 00:10:50.819816 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 02 00:10:50 crc kubenswrapper[4856]: I1202 00:10:50.906895 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 02 00:10:50 crc kubenswrapper[4856]: I1202 00:10:50.998286 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 02 00:10:51 crc kubenswrapper[4856]: I1202 00:10:51.181182 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 02 00:10:51 crc kubenswrapper[4856]: I1202 00:10:51.181761 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 02 00:10:51 crc kubenswrapper[4856]: I1202 00:10:51.233687 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 02 00:10:51 crc kubenswrapper[4856]: I1202 00:10:51.290747 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 02 00:10:51 crc kubenswrapper[4856]: I1202 00:10:51.362192 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 02 00:10:51 crc kubenswrapper[4856]: I1202 00:10:51.368048 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 02 00:10:51 crc kubenswrapper[4856]: I1202 00:10:51.378391 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 02 00:10:51 crc kubenswrapper[4856]: I1202 00:10:51.410768 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 02 00:10:51 crc kubenswrapper[4856]: I1202 00:10:51.422425 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 02 00:10:51 crc kubenswrapper[4856]: I1202 00:10:51.447629 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 02 00:10:51 crc kubenswrapper[4856]: I1202 00:10:51.524491 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 02 00:10:51 crc kubenswrapper[4856]: I1202 00:10:51.547500 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 02 00:10:51 crc kubenswrapper[4856]: I1202 00:10:51.570136 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 02 00:10:51 crc kubenswrapper[4856]: I1202 00:10:51.589687 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 02 00:10:51 crc kubenswrapper[4856]: I1202 00:10:51.599216 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 02 00:10:51 crc kubenswrapper[4856]: I1202 00:10:51.616065 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 
02 00:10:51 crc kubenswrapper[4856]: I1202 00:10:51.689407 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 02 00:10:51 crc kubenswrapper[4856]: I1202 00:10:51.733413 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 02 00:10:51 crc kubenswrapper[4856]: I1202 00:10:51.743621 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 02 00:10:51 crc kubenswrapper[4856]: I1202 00:10:51.757998 4856 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 02 00:10:51 crc kubenswrapper[4856]: I1202 00:10:51.804440 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 02 00:10:51 crc kubenswrapper[4856]: I1202 00:10:51.847140 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 02 00:10:51 crc kubenswrapper[4856]: I1202 00:10:51.968213 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 02 00:10:52 crc kubenswrapper[4856]: I1202 00:10:52.024787 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 02 00:10:52 crc kubenswrapper[4856]: I1202 00:10:52.102406 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 02 00:10:52 crc kubenswrapper[4856]: I1202 00:10:52.202540 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 02 00:10:52 crc kubenswrapper[4856]: I1202 00:10:52.204431 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 02 00:10:52 crc kubenswrapper[4856]: I1202 00:10:52.413691 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 02 00:10:52 crc kubenswrapper[4856]: I1202 00:10:52.424164 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 02 00:10:52 crc kubenswrapper[4856]: I1202 00:10:52.452093 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 02 00:10:52 crc kubenswrapper[4856]: I1202 00:10:52.461165 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 02 00:10:52 crc kubenswrapper[4856]: I1202 00:10:52.486525 4856 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 02 00:10:52 crc kubenswrapper[4856]: I1202 00:10:52.488697 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 02 00:10:52 crc kubenswrapper[4856]: I1202 00:10:52.727214 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 02 00:10:52 crc kubenswrapper[4856]: I1202 00:10:52.756585 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 02 00:10:52 crc kubenswrapper[4856]: I1202 
00:10:52.762430 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 02 00:10:52 crc kubenswrapper[4856]: I1202 00:10:52.801752 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 02 00:10:52 crc kubenswrapper[4856]: I1202 00:10:52.937181 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 02 00:10:52 crc kubenswrapper[4856]: I1202 00:10:52.993873 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 02 00:10:53 crc kubenswrapper[4856]: I1202 00:10:53.124117 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 02 00:10:53 crc kubenswrapper[4856]: I1202 00:10:53.145288 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 02 00:10:53 crc kubenswrapper[4856]: I1202 00:10:53.257293 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 02 00:10:53 crc kubenswrapper[4856]: I1202 00:10:53.272281 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 02 00:10:53 crc kubenswrapper[4856]: I1202 00:10:53.272619 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 02 00:10:53 crc kubenswrapper[4856]: I1202 00:10:53.315816 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 02 00:10:53 crc kubenswrapper[4856]: I1202 00:10:53.328218 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 02 00:10:53 crc kubenswrapper[4856]: I1202 00:10:53.391476 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 02 00:10:53 crc kubenswrapper[4856]: I1202 00:10:53.468253 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 02 00:10:53 crc kubenswrapper[4856]: I1202 00:10:53.526295 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 02 00:10:53 crc kubenswrapper[4856]: I1202 00:10:53.557888 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 02 00:10:53 crc kubenswrapper[4856]: I1202 00:10:53.559254 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 02 00:10:53 crc kubenswrapper[4856]: I1202 00:10:53.590670 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 02 00:10:53 crc kubenswrapper[4856]: I1202 00:10:53.632277 4856 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 02 00:10:53 crc kubenswrapper[4856]: I1202 00:10:53.746533 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 02 
00:10:53 crc kubenswrapper[4856]: I1202 00:10:53.789044 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 02 00:10:53 crc kubenswrapper[4856]: I1202 00:10:53.837356 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 02 00:10:53 crc kubenswrapper[4856]: I1202 00:10:53.837605 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 02 00:10:53 crc kubenswrapper[4856]: I1202 00:10:53.844860 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 02 00:10:53 crc kubenswrapper[4856]: I1202 00:10:53.890489 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 02 00:10:53 crc kubenswrapper[4856]: I1202 00:10:53.955386 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 02 00:10:54 crc kubenswrapper[4856]: I1202 00:10:54.013429 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 02 00:10:54 crc kubenswrapper[4856]: I1202 00:10:54.096525 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 02 00:10:54 crc kubenswrapper[4856]: I1202 00:10:54.102242 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 02 00:10:54 crc kubenswrapper[4856]: I1202 00:10:54.297797 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 02 00:10:54 crc kubenswrapper[4856]: I1202 00:10:54.490115 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 02 00:10:54 crc kubenswrapper[4856]: I1202 00:10:54.510018 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 02 00:10:54 crc kubenswrapper[4856]: I1202 00:10:54.671440 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 02 00:10:54 crc kubenswrapper[4856]: I1202 00:10:54.687980 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 02 00:10:54 crc kubenswrapper[4856]: I1202 00:10:54.777522 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 02 00:10:54 crc kubenswrapper[4856]: I1202 00:10:54.907563 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 02 00:10:54 crc kubenswrapper[4856]: I1202 00:10:54.914090 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 02 00:10:54 crc kubenswrapper[4856]: I1202 00:10:54.984335 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 02 00:10:55 crc kubenswrapper[4856]: I1202 00:10:55.158374 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 02 00:10:55 crc kubenswrapper[4856]: I1202 00:10:55.220013 
4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 02 00:10:55 crc kubenswrapper[4856]: I1202 00:10:55.221446 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 02 00:10:55 crc kubenswrapper[4856]: I1202 00:10:55.450804 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 02 00:10:55 crc kubenswrapper[4856]: I1202 00:10:55.568606 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 02 00:10:55 crc kubenswrapper[4856]: I1202 00:10:55.688401 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 02 00:10:55 crc kubenswrapper[4856]: I1202 00:10:55.696135 4856 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 02 00:10:55 crc kubenswrapper[4856]: I1202 00:10:55.697685 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=44.697673137 podStartE2EDuration="44.697673137s" podCreationTimestamp="2025-12-02 00:10:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:10:32.655828595 +0000 UTC m=+259.682196629" watchObservedRunningTime="2025-12-02 00:10:55.697673137 +0000 UTC m=+282.724041141" Dec 02 00:10:55 crc kubenswrapper[4856]: I1202 00:10:55.700104 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 02 00:10:55 crc kubenswrapper[4856]: I1202 00:10:55.700156 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 02 00:10:55 crc kubenswrapper[4856]: I1202 00:10:55.704862 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 00:10:55 crc kubenswrapper[4856]: I1202 00:10:55.705498 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 02 00:10:55 crc kubenswrapper[4856]: I1202 00:10:55.718701 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=23.718679819 podStartE2EDuration="23.718679819s" podCreationTimestamp="2025-12-02 00:10:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:10:55.715235101 +0000 UTC m=+282.741603105" watchObservedRunningTime="2025-12-02 00:10:55.718679819 +0000 UTC m=+282.745047833" Dec 02 00:10:55 crc kubenswrapper[4856]: I1202 00:10:55.733661 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 02 00:10:55 crc kubenswrapper[4856]: I1202 00:10:55.745633 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 02 00:10:55 crc kubenswrapper[4856]: I1202 00:10:55.759186 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 02 00:10:55 crc kubenswrapper[4856]: I1202 00:10:55.799219 4856 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 02 00:10:55 crc kubenswrapper[4856]: I1202 00:10:55.810163 4856 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 02 00:10:55 crc kubenswrapper[4856]: E1202 00:10:55.873053 4856 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-pod77c28b4e_13c3_403e_8408_fd51e65a1f58.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-pod77c28b4e_13c3_403e_8408_fd51e65a1f58.slice/crio-1f41c8936e470e1e65a9ae4201c580534f65d62215353bd1bc633d87e943c415\": RecentStats: unable to find data in memory cache]" Dec 02 00:10:55 crc kubenswrapper[4856]: I1202 00:10:55.969443 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 02 00:10:55 crc kubenswrapper[4856]: I1202 00:10:55.979758 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 02 00:10:56 crc kubenswrapper[4856]: I1202 00:10:56.037630 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 02 00:10:56 crc kubenswrapper[4856]: I1202 00:10:56.053316 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 02 00:10:56 crc kubenswrapper[4856]: I1202 00:10:56.223215 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 02 00:10:56 crc kubenswrapper[4856]: I1202 00:10:56.315844 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 02 00:10:56 crc kubenswrapper[4856]: I1202 00:10:56.340703 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 02 00:10:56 crc kubenswrapper[4856]: I1202 00:10:56.434880 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 02 00:10:56 crc kubenswrapper[4856]: I1202 00:10:56.576564 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 02 00:10:56 crc kubenswrapper[4856]: I1202 00:10:56.819335 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 02 00:10:56 crc kubenswrapper[4856]: I1202 00:10:56.837580 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 02 00:10:56 crc kubenswrapper[4856]: I1202 00:10:56.964799 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 02 00:10:57 crc kubenswrapper[4856]: I1202 00:10:57.022124 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 02 00:10:57 crc kubenswrapper[4856]: I1202 00:10:57.040912 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 02 00:10:57 crc kubenswrapper[4856]: I1202 00:10:57.114850 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 02 
00:10:57 crc kubenswrapper[4856]: I1202 00:10:57.176503 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 02 00:10:57 crc kubenswrapper[4856]: I1202 00:10:57.240911 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 02 00:10:57 crc kubenswrapper[4856]: I1202 00:10:57.411069 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 02 00:10:57 crc kubenswrapper[4856]: I1202 00:10:57.440524 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 02 00:10:57 crc kubenswrapper[4856]: I1202 00:10:57.587710 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 02 00:10:57 crc kubenswrapper[4856]: I1202 00:10:57.878828 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 02 00:10:57 crc kubenswrapper[4856]: I1202 00:10:57.947228 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 02 00:10:58 crc kubenswrapper[4856]: I1202 00:10:58.049267 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 02 00:10:58 crc kubenswrapper[4856]: I1202 00:10:58.150362 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 02 00:10:58 crc kubenswrapper[4856]: I1202 00:10:58.211808 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 02 00:10:58 crc kubenswrapper[4856]: I1202 00:10:58.221859 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 02 00:10:58 crc kubenswrapper[4856]: I1202 00:10:58.352894 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 02 00:10:58 crc kubenswrapper[4856]: I1202 00:10:58.509768 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 02 00:10:58 crc kubenswrapper[4856]: I1202 00:10:58.597936 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 02 00:10:58 crc kubenswrapper[4856]: I1202 00:10:58.870358 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 02 00:10:58 crc kubenswrapper[4856]: I1202 00:10:58.990584 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 02 00:10:59 crc kubenswrapper[4856]: I1202 00:10:59.060077 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 02 00:10:59 crc kubenswrapper[4856]: I1202 00:10:59.409853 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 02 00:10:59 crc kubenswrapper[4856]: I1202 00:10:59.424084 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 02 00:11:01 crc kubenswrapper[4856]: I1202 00:11:01.025104 4856 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 02 00:11:05 crc kubenswrapper[4856]: E1202 00:11:05.985182 4856 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-pod77c28b4e_13c3_403e_8408_fd51e65a1f58.slice/crio-1f41c8936e470e1e65a9ae4201c580534f65d62215353bd1bc633d87e943c415\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-pod77c28b4e_13c3_403e_8408_fd51e65a1f58.slice\": RecentStats: unable to find data in memory cache]" Dec 02 00:11:06 crc kubenswrapper[4856]: I1202 00:11:06.757551 4856 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 02 00:11:06 crc kubenswrapper[4856]: I1202 00:11:06.758239 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://3ee5d25084eae3612f87882212629a284ab805dc7adf692a4474b5bdc8cae138" gracePeriod=5 Dec 02 00:11:11 crc kubenswrapper[4856]: I1202 00:11:11.934989 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 02 00:11:11 crc kubenswrapper[4856]: I1202 00:11:11.935336 4856 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="3ee5d25084eae3612f87882212629a284ab805dc7adf692a4474b5bdc8cae138" exitCode=137 Dec 02 00:11:12 crc kubenswrapper[4856]: I1202 00:11:12.360646 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 02 00:11:12 crc kubenswrapper[4856]: I1202 00:11:12.360773 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 00:11:12 crc kubenswrapper[4856]: I1202 00:11:12.436406 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 02 00:11:12 crc kubenswrapper[4856]: I1202 00:11:12.436509 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 02 00:11:12 crc kubenswrapper[4856]: I1202 00:11:12.436528 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:11:12 crc kubenswrapper[4856]: I1202 00:11:12.436554 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 02 00:11:12 crc kubenswrapper[4856]: I1202 00:11:12.436634 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 02 00:11:12 crc kubenswrapper[4856]: I1202 00:11:12.436641 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:11:12 crc kubenswrapper[4856]: I1202 00:11:12.436685 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 02 00:11:12 crc kubenswrapper[4856]: I1202 00:11:12.436687 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:11:12 crc kubenswrapper[4856]: I1202 00:11:12.436800 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:11:12 crc kubenswrapper[4856]: I1202 00:11:12.437172 4856 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 02 00:11:12 crc kubenswrapper[4856]: I1202 00:11:12.437196 4856 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 02 00:11:12 crc kubenswrapper[4856]: I1202 00:11:12.437213 4856 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 02 00:11:12 crc kubenswrapper[4856]: I1202 00:11:12.437232 4856 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 02 00:11:12 crc kubenswrapper[4856]: I1202 00:11:12.449843 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:11:12 crc kubenswrapper[4856]: I1202 00:11:12.538877 4856 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 02 00:11:12 crc kubenswrapper[4856]: I1202 00:11:12.944399 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 02 00:11:12 crc kubenswrapper[4856]: I1202 00:11:12.944516 4856 scope.go:117] "RemoveContainer" containerID="3ee5d25084eae3612f87882212629a284ab805dc7adf692a4474b5bdc8cae138" Dec 02 00:11:12 crc kubenswrapper[4856]: I1202 00:11:12.944675 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 00:11:13 crc kubenswrapper[4856]: I1202 00:11:13.264901 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 02 00:11:13 crc kubenswrapper[4856]: I1202 00:11:13.265563 4856 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Dec 02 00:11:13 crc kubenswrapper[4856]: I1202 00:11:13.283114 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 02 00:11:13 crc kubenswrapper[4856]: I1202 00:11:13.283207 4856 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="cffb137e-e15f-46fb-bfb5-a8ec30c22a7a" Dec 02 00:11:13 crc kubenswrapper[4856]: I1202 00:11:13.290030 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 02 00:11:13 crc kubenswrapper[4856]: I1202 00:11:13.290088 4856 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="cffb137e-e15f-46fb-bfb5-a8ec30c22a7a" Dec 02 00:11:13 crc kubenswrapper[4856]: I1202 00:11:13.436188 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 02 00:11:17 crc kubenswrapper[4856]: I1202 00:11:17.982707 4856 generic.go:334] "Generic (PLEG): container finished" podID="7530965f-940a-4ac6-8dc6-be67d35a2f08" containerID="7ecedc68031c6f576af14cc863dd7c89663fee5d5e039e47542d11f5c383272c" exitCode=0 Dec 02 00:11:17 crc kubenswrapper[4856]: I1202 00:11:17.983165 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" event={"ID":"7530965f-940a-4ac6-8dc6-be67d35a2f08","Type":"ContainerDied","Data":"7ecedc68031c6f576af14cc863dd7c89663fee5d5e039e47542d11f5c383272c"} Dec 02 00:11:17 crc kubenswrapper[4856]: I1202 00:11:17.983915 4856 scope.go:117] "RemoveContainer" containerID="7ecedc68031c6f576af14cc863dd7c89663fee5d5e039e47542d11f5c383272c" Dec 02 00:11:18 crc kubenswrapper[4856]: I1202 00:11:18.992715 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" event={"ID":"7530965f-940a-4ac6-8dc6-be67d35a2f08","Type":"ContainerStarted","Data":"7455ac68aaf7868e61ae214c1dfbe6d979100ed55d1e2ffc353ef55a6cdf7d7b"} Dec 02 00:11:18 crc kubenswrapper[4856]: I1202 00:11:18.993383 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" Dec 02 00:11:18 crc kubenswrapper[4856]: I1202 00:11:18.997813 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" Dec 02 00:11:21 crc kubenswrapper[4856]: I1202 00:11:21.760201 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.406994 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dp8tx"] Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.408433 4856 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" podUID="b905a3d3-a5b2-44a5-8d8c-4bb200cff14d" containerName="controller-manager" containerID="cri-o://259caaa69f6c0c8e3d22b7ec3931bdfa9e3a7619c877140f5bfa4671501e29f9" gracePeriod=30 Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.495630 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l"] Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.495930 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l" podUID="4fe59379-8ff6-4c3b-aa26-7e65a11d405b" containerName="route-controller-manager" containerID="cri-o://75628dccd6f7bcb0f3ec21cc9608b03bdf5e67314cd53bb76cc2ae2120bd4cc4" gracePeriod=30 Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.747951 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.798652 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-config\") pod \"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d\" (UID: \"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d\") " Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.798701 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhxb6\" (UniqueName: \"kubernetes.io/projected/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-kube-api-access-jhxb6\") pod \"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d\" (UID: \"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d\") " Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.798768 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-serving-cert\") pod \"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d\" (UID: \"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d\") " Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.798795 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-proxy-ca-bundles\") pod \"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d\" (UID: \"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d\") " Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.798830 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-client-ca\") pod \"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d\" (UID: \"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d\") " Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.799504 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-client-ca" (OuterVolumeSpecName: "client-ca") pod "b905a3d3-a5b2-44a5-8d8c-4bb200cff14d" (UID: "b905a3d3-a5b2-44a5-8d8c-4bb200cff14d"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.799606 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "b905a3d3-a5b2-44a5-8d8c-4bb200cff14d" (UID: "b905a3d3-a5b2-44a5-8d8c-4bb200cff14d"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.799734 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-config" (OuterVolumeSpecName: "config") pod "b905a3d3-a5b2-44a5-8d8c-4bb200cff14d" (UID: "b905a3d3-a5b2-44a5-8d8c-4bb200cff14d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.803564 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-kube-api-access-jhxb6" (OuterVolumeSpecName: "kube-api-access-jhxb6") pod "b905a3d3-a5b2-44a5-8d8c-4bb200cff14d" (UID: "b905a3d3-a5b2-44a5-8d8c-4bb200cff14d"). InnerVolumeSpecName "kube-api-access-jhxb6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.807892 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "b905a3d3-a5b2-44a5-8d8c-4bb200cff14d" (UID: "b905a3d3-a5b2-44a5-8d8c-4bb200cff14d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.808560 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l" Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.900113 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bt8g9\" (UniqueName: \"kubernetes.io/projected/4fe59379-8ff6-4c3b-aa26-7e65a11d405b-kube-api-access-bt8g9\") pod \"4fe59379-8ff6-4c3b-aa26-7e65a11d405b\" (UID: \"4fe59379-8ff6-4c3b-aa26-7e65a11d405b\") " Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.900247 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4fe59379-8ff6-4c3b-aa26-7e65a11d405b-config\") pod \"4fe59379-8ff6-4c3b-aa26-7e65a11d405b\" (UID: \"4fe59379-8ff6-4c3b-aa26-7e65a11d405b\") " Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.900274 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4fe59379-8ff6-4c3b-aa26-7e65a11d405b-serving-cert\") pod \"4fe59379-8ff6-4c3b-aa26-7e65a11d405b\" (UID: \"4fe59379-8ff6-4c3b-aa26-7e65a11d405b\") " Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.900294 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4fe59379-8ff6-4c3b-aa26-7e65a11d405b-client-ca\") pod \"4fe59379-8ff6-4c3b-aa26-7e65a11d405b\" (UID: \"4fe59379-8ff6-4c3b-aa26-7e65a11d405b\") " Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.900470 4856 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.900481 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhxb6\" (UniqueName: \"kubernetes.io/projected/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-kube-api-access-jhxb6\") on node \"crc\" DevicePath \"\"" Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.900491 4856 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.900499 4856 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.900509 4856 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.900979 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fe59379-8ff6-4c3b-aa26-7e65a11d405b-client-ca" (OuterVolumeSpecName: "client-ca") pod "4fe59379-8ff6-4c3b-aa26-7e65a11d405b" (UID: "4fe59379-8ff6-4c3b-aa26-7e65a11d405b"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.900995 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fe59379-8ff6-4c3b-aa26-7e65a11d405b-config" (OuterVolumeSpecName: "config") pod "4fe59379-8ff6-4c3b-aa26-7e65a11d405b" (UID: "4fe59379-8ff6-4c3b-aa26-7e65a11d405b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.903639 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fe59379-8ff6-4c3b-aa26-7e65a11d405b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "4fe59379-8ff6-4c3b-aa26-7e65a11d405b" (UID: "4fe59379-8ff6-4c3b-aa26-7e65a11d405b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:11:29 crc kubenswrapper[4856]: I1202 00:11:29.903662 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fe59379-8ff6-4c3b-aa26-7e65a11d405b-kube-api-access-bt8g9" (OuterVolumeSpecName: "kube-api-access-bt8g9") pod "4fe59379-8ff6-4c3b-aa26-7e65a11d405b" (UID: "4fe59379-8ff6-4c3b-aa26-7e65a11d405b"). InnerVolumeSpecName "kube-api-access-bt8g9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.002015 4856 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4fe59379-8ff6-4c3b-aa26-7e65a11d405b-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.002049 4856 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4fe59379-8ff6-4c3b-aa26-7e65a11d405b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.002059 4856 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4fe59379-8ff6-4c3b-aa26-7e65a11d405b-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.002069 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bt8g9\" (UniqueName: \"kubernetes.io/projected/4fe59379-8ff6-4c3b-aa26-7e65a11d405b-kube-api-access-bt8g9\") on node \"crc\" DevicePath \"\"" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.061218 4856 generic.go:334] "Generic (PLEG): container finished" podID="b905a3d3-a5b2-44a5-8d8c-4bb200cff14d" containerID="259caaa69f6c0c8e3d22b7ec3931bdfa9e3a7619c877140f5bfa4671501e29f9" exitCode=0 Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.061283 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" event={"ID":"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d","Type":"ContainerDied","Data":"259caaa69f6c0c8e3d22b7ec3931bdfa9e3a7619c877140f5bfa4671501e29f9"} Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.061267 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.061339 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-dp8tx" event={"ID":"b905a3d3-a5b2-44a5-8d8c-4bb200cff14d","Type":"ContainerDied","Data":"2b96c664b7d3b8a7b0b38bde1875526f197644c915e5a22e33cd4f20c9be9847"} Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.061375 4856 scope.go:117] "RemoveContainer" containerID="259caaa69f6c0c8e3d22b7ec3931bdfa9e3a7619c877140f5bfa4671501e29f9" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.063924 4856 generic.go:334] "Generic (PLEG): container finished" podID="4fe59379-8ff6-4c3b-aa26-7e65a11d405b" containerID="75628dccd6f7bcb0f3ec21cc9608b03bdf5e67314cd53bb76cc2ae2120bd4cc4" exitCode=0 Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.063960 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l" event={"ID":"4fe59379-8ff6-4c3b-aa26-7e65a11d405b","Type":"ContainerDied","Data":"75628dccd6f7bcb0f3ec21cc9608b03bdf5e67314cd53bb76cc2ae2120bd4cc4"} Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.063985 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l" event={"ID":"4fe59379-8ff6-4c3b-aa26-7e65a11d405b","Type":"ContainerDied","Data":"a311675b19260da888c98ad6eede72071926e23d6e3768fe223b0baaffa24de5"} Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.064044 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.075451 4856 scope.go:117] "RemoveContainer" containerID="259caaa69f6c0c8e3d22b7ec3931bdfa9e3a7619c877140f5bfa4671501e29f9" Dec 02 00:11:30 crc kubenswrapper[4856]: E1202 00:11:30.075954 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"259caaa69f6c0c8e3d22b7ec3931bdfa9e3a7619c877140f5bfa4671501e29f9\": container with ID starting with 259caaa69f6c0c8e3d22b7ec3931bdfa9e3a7619c877140f5bfa4671501e29f9 not found: ID does not exist" containerID="259caaa69f6c0c8e3d22b7ec3931bdfa9e3a7619c877140f5bfa4671501e29f9" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.075990 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"259caaa69f6c0c8e3d22b7ec3931bdfa9e3a7619c877140f5bfa4671501e29f9"} err="failed to get container status \"259caaa69f6c0c8e3d22b7ec3931bdfa9e3a7619c877140f5bfa4671501e29f9\": rpc error: code = NotFound desc = could not find container \"259caaa69f6c0c8e3d22b7ec3931bdfa9e3a7619c877140f5bfa4671501e29f9\": container with ID starting with 259caaa69f6c0c8e3d22b7ec3931bdfa9e3a7619c877140f5bfa4671501e29f9 not found: ID does not exist" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.076016 4856 scope.go:117] "RemoveContainer" containerID="75628dccd6f7bcb0f3ec21cc9608b03bdf5e67314cd53bb76cc2ae2120bd4cc4" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.093225 4856 scope.go:117] "RemoveContainer" containerID="75628dccd6f7bcb0f3ec21cc9608b03bdf5e67314cd53bb76cc2ae2120bd4cc4" Dec 02 00:11:30 crc kubenswrapper[4856]: E1202 00:11:30.093634 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = 
could not find container \"75628dccd6f7bcb0f3ec21cc9608b03bdf5e67314cd53bb76cc2ae2120bd4cc4\": container with ID starting with 75628dccd6f7bcb0f3ec21cc9608b03bdf5e67314cd53bb76cc2ae2120bd4cc4 not found: ID does not exist" containerID="75628dccd6f7bcb0f3ec21cc9608b03bdf5e67314cd53bb76cc2ae2120bd4cc4" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.093710 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75628dccd6f7bcb0f3ec21cc9608b03bdf5e67314cd53bb76cc2ae2120bd4cc4"} err="failed to get container status \"75628dccd6f7bcb0f3ec21cc9608b03bdf5e67314cd53bb76cc2ae2120bd4cc4\": rpc error: code = NotFound desc = could not find container \"75628dccd6f7bcb0f3ec21cc9608b03bdf5e67314cd53bb76cc2ae2120bd4cc4\": container with ID starting with 75628dccd6f7bcb0f3ec21cc9608b03bdf5e67314cd53bb76cc2ae2120bd4cc4 not found: ID does not exist" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.094126 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dp8tx"] Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.103062 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dp8tx"] Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.109018 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l"] Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.112579 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-xph6l"] Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.767441 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw"] Dec 02 00:11:30 crc kubenswrapper[4856]: E1202 00:11:30.767741 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fe59379-8ff6-4c3b-aa26-7e65a11d405b" containerName="route-controller-manager" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.767792 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fe59379-8ff6-4c3b-aa26-7e65a11d405b" containerName="route-controller-manager" Dec 02 00:11:30 crc kubenswrapper[4856]: E1202 00:11:30.767808 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b905a3d3-a5b2-44a5-8d8c-4bb200cff14d" containerName="controller-manager" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.767821 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="b905a3d3-a5b2-44a5-8d8c-4bb200cff14d" containerName="controller-manager" Dec 02 00:11:30 crc kubenswrapper[4856]: E1202 00:11:30.767839 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77c28b4e-13c3-403e-8408-fd51e65a1f58" containerName="installer" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.767851 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="77c28b4e-13c3-403e-8408-fd51e65a1f58" containerName="installer" Dec 02 00:11:30 crc kubenswrapper[4856]: E1202 00:11:30.767867 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.767876 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.768010 4856 
memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.768028 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="77c28b4e-13c3-403e-8408-fd51e65a1f58" containerName="installer" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.768047 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fe59379-8ff6-4c3b-aa26-7e65a11d405b" containerName="route-controller-manager" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.768061 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="b905a3d3-a5b2-44a5-8d8c-4bb200cff14d" containerName="controller-manager" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.768530 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.774108 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.774183 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.774359 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.774370 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.774662 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.775011 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.781327 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq"] Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.782245 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.792085 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.792278 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.792333 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.792287 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.792789 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.797808 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.801755 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.803642 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw"] Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.811822 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrl7m\" (UniqueName: \"kubernetes.io/projected/6bf197ed-87f5-4089-a7f6-540c7e43118f-kube-api-access-hrl7m\") pod \"route-controller-manager-596dc78bdd-h2pxw\" (UID: \"6bf197ed-87f5-4089-a7f6-540c7e43118f\") " pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.811911 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6bf197ed-87f5-4089-a7f6-540c7e43118f-serving-cert\") pod \"route-controller-manager-596dc78bdd-h2pxw\" (UID: \"6bf197ed-87f5-4089-a7f6-540c7e43118f\") " pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.811942 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6bf197ed-87f5-4089-a7f6-540c7e43118f-config\") pod \"route-controller-manager-596dc78bdd-h2pxw\" (UID: \"6bf197ed-87f5-4089-a7f6-540c7e43118f\") " pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.812087 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6bf197ed-87f5-4089-a7f6-540c7e43118f-client-ca\") pod \"route-controller-manager-596dc78bdd-h2pxw\" (UID: \"6bf197ed-87f5-4089-a7f6-540c7e43118f\") " pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.817698 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq"] Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.912956 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6bf197ed-87f5-4089-a7f6-540c7e43118f-config\") pod \"route-controller-manager-596dc78bdd-h2pxw\" (UID: \"6bf197ed-87f5-4089-a7f6-540c7e43118f\") " pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.913503 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6bf197ed-87f5-4089-a7f6-540c7e43118f-client-ca\") pod \"route-controller-manager-596dc78bdd-h2pxw\" (UID: \"6bf197ed-87f5-4089-a7f6-540c7e43118f\") " pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.913641 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8tgk\" (UniqueName: \"kubernetes.io/projected/d2bf66be-a39c-4eee-bde0-cee90e4c3400-kube-api-access-m8tgk\") pod \"controller-manager-8464b9b6cd-vgtmq\" (UID: \"d2bf66be-a39c-4eee-bde0-cee90e4c3400\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.913763 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrl7m\" (UniqueName: \"kubernetes.io/projected/6bf197ed-87f5-4089-a7f6-540c7e43118f-kube-api-access-hrl7m\") pod \"route-controller-manager-596dc78bdd-h2pxw\" (UID: \"6bf197ed-87f5-4089-a7f6-540c7e43118f\") " pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.913872 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2bf66be-a39c-4eee-bde0-cee90e4c3400-serving-cert\") pod \"controller-manager-8464b9b6cd-vgtmq\" (UID: \"d2bf66be-a39c-4eee-bde0-cee90e4c3400\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.914004 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d2bf66be-a39c-4eee-bde0-cee90e4c3400-client-ca\") pod \"controller-manager-8464b9b6cd-vgtmq\" (UID: \"d2bf66be-a39c-4eee-bde0-cee90e4c3400\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.914094 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2bf66be-a39c-4eee-bde0-cee90e4c3400-config\") pod \"controller-manager-8464b9b6cd-vgtmq\" (UID: \"d2bf66be-a39c-4eee-bde0-cee90e4c3400\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.914174 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d2bf66be-a39c-4eee-bde0-cee90e4c3400-proxy-ca-bundles\") pod \"controller-manager-8464b9b6cd-vgtmq\" (UID: \"d2bf66be-a39c-4eee-bde0-cee90e4c3400\") " 
pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.914298 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6bf197ed-87f5-4089-a7f6-540c7e43118f-serving-cert\") pod \"route-controller-manager-596dc78bdd-h2pxw\" (UID: \"6bf197ed-87f5-4089-a7f6-540c7e43118f\") " pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.914479 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6bf197ed-87f5-4089-a7f6-540c7e43118f-client-ca\") pod \"route-controller-manager-596dc78bdd-h2pxw\" (UID: \"6bf197ed-87f5-4089-a7f6-540c7e43118f\") " pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.914800 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6bf197ed-87f5-4089-a7f6-540c7e43118f-config\") pod \"route-controller-manager-596dc78bdd-h2pxw\" (UID: \"6bf197ed-87f5-4089-a7f6-540c7e43118f\") " pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.922578 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6bf197ed-87f5-4089-a7f6-540c7e43118f-serving-cert\") pod \"route-controller-manager-596dc78bdd-h2pxw\" (UID: \"6bf197ed-87f5-4089-a7f6-540c7e43118f\") " pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw" Dec 02 00:11:30 crc kubenswrapper[4856]: I1202 00:11:30.931285 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrl7m\" (UniqueName: \"kubernetes.io/projected/6bf197ed-87f5-4089-a7f6-540c7e43118f-kube-api-access-hrl7m\") pod \"route-controller-manager-596dc78bdd-h2pxw\" (UID: \"6bf197ed-87f5-4089-a7f6-540c7e43118f\") " pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw" Dec 02 00:11:31 crc kubenswrapper[4856]: I1202 00:11:31.015368 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8tgk\" (UniqueName: \"kubernetes.io/projected/d2bf66be-a39c-4eee-bde0-cee90e4c3400-kube-api-access-m8tgk\") pod \"controller-manager-8464b9b6cd-vgtmq\" (UID: \"d2bf66be-a39c-4eee-bde0-cee90e4c3400\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" Dec 02 00:11:31 crc kubenswrapper[4856]: I1202 00:11:31.015437 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2bf66be-a39c-4eee-bde0-cee90e4c3400-serving-cert\") pod \"controller-manager-8464b9b6cd-vgtmq\" (UID: \"d2bf66be-a39c-4eee-bde0-cee90e4c3400\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" Dec 02 00:11:31 crc kubenswrapper[4856]: I1202 00:11:31.015529 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d2bf66be-a39c-4eee-bde0-cee90e4c3400-client-ca\") pod \"controller-manager-8464b9b6cd-vgtmq\" (UID: \"d2bf66be-a39c-4eee-bde0-cee90e4c3400\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" Dec 02 00:11:31 crc kubenswrapper[4856]: I1202 00:11:31.015555 4856 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2bf66be-a39c-4eee-bde0-cee90e4c3400-config\") pod \"controller-manager-8464b9b6cd-vgtmq\" (UID: \"d2bf66be-a39c-4eee-bde0-cee90e4c3400\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" Dec 02 00:11:31 crc kubenswrapper[4856]: I1202 00:11:31.015579 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d2bf66be-a39c-4eee-bde0-cee90e4c3400-proxy-ca-bundles\") pod \"controller-manager-8464b9b6cd-vgtmq\" (UID: \"d2bf66be-a39c-4eee-bde0-cee90e4c3400\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" Dec 02 00:11:31 crc kubenswrapper[4856]: I1202 00:11:31.016935 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d2bf66be-a39c-4eee-bde0-cee90e4c3400-client-ca\") pod \"controller-manager-8464b9b6cd-vgtmq\" (UID: \"d2bf66be-a39c-4eee-bde0-cee90e4c3400\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" Dec 02 00:11:31 crc kubenswrapper[4856]: I1202 00:11:31.017295 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d2bf66be-a39c-4eee-bde0-cee90e4c3400-proxy-ca-bundles\") pod \"controller-manager-8464b9b6cd-vgtmq\" (UID: \"d2bf66be-a39c-4eee-bde0-cee90e4c3400\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" Dec 02 00:11:31 crc kubenswrapper[4856]: I1202 00:11:31.018035 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2bf66be-a39c-4eee-bde0-cee90e4c3400-config\") pod \"controller-manager-8464b9b6cd-vgtmq\" (UID: \"d2bf66be-a39c-4eee-bde0-cee90e4c3400\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" Dec 02 00:11:31 crc kubenswrapper[4856]: I1202 00:11:31.024189 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2bf66be-a39c-4eee-bde0-cee90e4c3400-serving-cert\") pod \"controller-manager-8464b9b6cd-vgtmq\" (UID: \"d2bf66be-a39c-4eee-bde0-cee90e4c3400\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" Dec 02 00:11:31 crc kubenswrapper[4856]: I1202 00:11:31.038707 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8tgk\" (UniqueName: \"kubernetes.io/projected/d2bf66be-a39c-4eee-bde0-cee90e4c3400-kube-api-access-m8tgk\") pod \"controller-manager-8464b9b6cd-vgtmq\" (UID: \"d2bf66be-a39c-4eee-bde0-cee90e4c3400\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" Dec 02 00:11:31 crc kubenswrapper[4856]: I1202 00:11:31.104193 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw" Dec 02 00:11:31 crc kubenswrapper[4856]: I1202 00:11:31.116056 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" Dec 02 00:11:31 crc kubenswrapper[4856]: I1202 00:11:31.220687 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq"] Dec 02 00:11:31 crc kubenswrapper[4856]: I1202 00:11:31.229926 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw"] Dec 02 00:11:31 crc kubenswrapper[4856]: I1202 00:11:31.267986 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fe59379-8ff6-4c3b-aa26-7e65a11d405b" path="/var/lib/kubelet/pods/4fe59379-8ff6-4c3b-aa26-7e65a11d405b/volumes" Dec 02 00:11:31 crc kubenswrapper[4856]: I1202 00:11:31.268688 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b905a3d3-a5b2-44a5-8d8c-4bb200cff14d" path="/var/lib/kubelet/pods/b905a3d3-a5b2-44a5-8d8c-4bb200cff14d/volumes" Dec 02 00:11:31 crc kubenswrapper[4856]: I1202 00:11:31.408827 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw"] Dec 02 00:11:31 crc kubenswrapper[4856]: I1202 00:11:31.441852 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq"] Dec 02 00:11:31 crc kubenswrapper[4856]: W1202 00:11:31.446143 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd2bf66be_a39c_4eee_bde0_cee90e4c3400.slice/crio-c7b1a5578f628ce41ddaa57c03aa4400554108032c4e0e4771f89e0afc0ecee7 WatchSource:0}: Error finding container c7b1a5578f628ce41ddaa57c03aa4400554108032c4e0e4771f89e0afc0ecee7: Status 404 returned error can't find the container with id c7b1a5578f628ce41ddaa57c03aa4400554108032c4e0e4771f89e0afc0ecee7 Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.078470 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw" event={"ID":"6bf197ed-87f5-4089-a7f6-540c7e43118f","Type":"ContainerStarted","Data":"29d9cc970aeec3c27cfe4e01593efac3354e24742dab35ed807cdea24c29bdb8"} Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.078514 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw" event={"ID":"6bf197ed-87f5-4089-a7f6-540c7e43118f","Type":"ContainerStarted","Data":"34af86372bbfa616b0eb75c0119c66f8121cf63b966405bed8e1754d15e363fa"} Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.078523 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw" podUID="6bf197ed-87f5-4089-a7f6-540c7e43118f" containerName="route-controller-manager" containerID="cri-o://29d9cc970aeec3c27cfe4e01593efac3354e24742dab35ed807cdea24c29bdb8" gracePeriod=30 Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.078608 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.079565 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" 
event={"ID":"d2bf66be-a39c-4eee-bde0-cee90e4c3400","Type":"ContainerStarted","Data":"9513feb43d021fef77424abcaefa8513525af17df76568dc4adbb3565d04c595"} Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.079584 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" event={"ID":"d2bf66be-a39c-4eee-bde0-cee90e4c3400","Type":"ContainerStarted","Data":"c7b1a5578f628ce41ddaa57c03aa4400554108032c4e0e4771f89e0afc0ecee7"} Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.079695 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" podUID="d2bf66be-a39c-4eee-bde0-cee90e4c3400" containerName="controller-manager" containerID="cri-o://9513feb43d021fef77424abcaefa8513525af17df76568dc4adbb3565d04c595" gracePeriod=30 Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.079757 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.084758 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.099828 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw" podStartSLOduration=3.099809521 podStartE2EDuration="3.099809521s" podCreationTimestamp="2025-12-02 00:11:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:11:32.096528038 +0000 UTC m=+319.122896042" watchObservedRunningTime="2025-12-02 00:11:32.099809521 +0000 UTC m=+319.126177525" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.121232 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" podStartSLOduration=3.121213723 podStartE2EDuration="3.121213723s" podCreationTimestamp="2025-12-02 00:11:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:11:32.117724575 +0000 UTC m=+319.144092589" watchObservedRunningTime="2025-12-02 00:11:32.121213723 +0000 UTC m=+319.147581727" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.124114 4856 patch_prober.go:28] interesting pod/route-controller-manager-596dc78bdd-h2pxw container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.58:8443/healthz\": read tcp 10.217.0.2:37124->10.217.0.58:8443: read: connection reset by peer" start-of-body= Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.124157 4856 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw" podUID="6bf197ed-87f5-4089-a7f6-540c7e43118f" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.58:8443/healthz\": read tcp 10.217.0.2:37124->10.217.0.58:8443: read: connection reset by peer" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.456757 4856 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-596dc78bdd-h2pxw_6bf197ed-87f5-4089-a7f6-540c7e43118f/route-controller-manager/0.log" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.456827 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.461725 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.483414 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646"] Dec 02 00:11:32 crc kubenswrapper[4856]: E1202 00:11:32.483702 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2bf66be-a39c-4eee-bde0-cee90e4c3400" containerName="controller-manager" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.483718 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2bf66be-a39c-4eee-bde0-cee90e4c3400" containerName="controller-manager" Dec 02 00:11:32 crc kubenswrapper[4856]: E1202 00:11:32.483729 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bf197ed-87f5-4089-a7f6-540c7e43118f" containerName="route-controller-manager" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.483737 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bf197ed-87f5-4089-a7f6-540c7e43118f" containerName="route-controller-manager" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.483845 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2bf66be-a39c-4eee-bde0-cee90e4c3400" containerName="controller-manager" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.483860 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bf197ed-87f5-4089-a7f6-540c7e43118f" containerName="route-controller-manager" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.484282 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.495260 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646"] Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.547142 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hrl7m\" (UniqueName: \"kubernetes.io/projected/6bf197ed-87f5-4089-a7f6-540c7e43118f-kube-api-access-hrl7m\") pod \"6bf197ed-87f5-4089-a7f6-540c7e43118f\" (UID: \"6bf197ed-87f5-4089-a7f6-540c7e43118f\") " Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.547188 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d2bf66be-a39c-4eee-bde0-cee90e4c3400-proxy-ca-bundles\") pod \"d2bf66be-a39c-4eee-bde0-cee90e4c3400\" (UID: \"d2bf66be-a39c-4eee-bde0-cee90e4c3400\") " Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.547226 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2bf66be-a39c-4eee-bde0-cee90e4c3400-config\") pod \"d2bf66be-a39c-4eee-bde0-cee90e4c3400\" (UID: \"d2bf66be-a39c-4eee-bde0-cee90e4c3400\") " Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.547254 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2bf66be-a39c-4eee-bde0-cee90e4c3400-serving-cert\") pod \"d2bf66be-a39c-4eee-bde0-cee90e4c3400\" (UID: \"d2bf66be-a39c-4eee-bde0-cee90e4c3400\") " Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.547317 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6bf197ed-87f5-4089-a7f6-540c7e43118f-config\") pod \"6bf197ed-87f5-4089-a7f6-540c7e43118f\" (UID: \"6bf197ed-87f5-4089-a7f6-540c7e43118f\") " Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.547340 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6bf197ed-87f5-4089-a7f6-540c7e43118f-client-ca\") pod \"6bf197ed-87f5-4089-a7f6-540c7e43118f\" (UID: \"6bf197ed-87f5-4089-a7f6-540c7e43118f\") " Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.547371 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m8tgk\" (UniqueName: \"kubernetes.io/projected/d2bf66be-a39c-4eee-bde0-cee90e4c3400-kube-api-access-m8tgk\") pod \"d2bf66be-a39c-4eee-bde0-cee90e4c3400\" (UID: \"d2bf66be-a39c-4eee-bde0-cee90e4c3400\") " Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.547414 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6bf197ed-87f5-4089-a7f6-540c7e43118f-serving-cert\") pod \"6bf197ed-87f5-4089-a7f6-540c7e43118f\" (UID: \"6bf197ed-87f5-4089-a7f6-540c7e43118f\") " Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.547435 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d2bf66be-a39c-4eee-bde0-cee90e4c3400-client-ca\") pod \"d2bf66be-a39c-4eee-bde0-cee90e4c3400\" (UID: \"d2bf66be-a39c-4eee-bde0-cee90e4c3400\") " Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.547610 4856 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fktl\" (UniqueName: \"kubernetes.io/projected/322c5833-2f45-4dbd-8b7f-8c16c9bf0013-kube-api-access-2fktl\") pod \"route-controller-manager-5598468cdf-m7646\" (UID: \"322c5833-2f45-4dbd-8b7f-8c16c9bf0013\") " pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.547636 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/322c5833-2f45-4dbd-8b7f-8c16c9bf0013-config\") pod \"route-controller-manager-5598468cdf-m7646\" (UID: \"322c5833-2f45-4dbd-8b7f-8c16c9bf0013\") " pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.547660 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/322c5833-2f45-4dbd-8b7f-8c16c9bf0013-serving-cert\") pod \"route-controller-manager-5598468cdf-m7646\" (UID: \"322c5833-2f45-4dbd-8b7f-8c16c9bf0013\") " pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.547697 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/322c5833-2f45-4dbd-8b7f-8c16c9bf0013-client-ca\") pod \"route-controller-manager-5598468cdf-m7646\" (UID: \"322c5833-2f45-4dbd-8b7f-8c16c9bf0013\") " pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.548153 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d2bf66be-a39c-4eee-bde0-cee90e4c3400-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "d2bf66be-a39c-4eee-bde0-cee90e4c3400" (UID: "d2bf66be-a39c-4eee-bde0-cee90e4c3400"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.548198 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d2bf66be-a39c-4eee-bde0-cee90e4c3400-config" (OuterVolumeSpecName: "config") pod "d2bf66be-a39c-4eee-bde0-cee90e4c3400" (UID: "d2bf66be-a39c-4eee-bde0-cee90e4c3400"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.548444 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d2bf66be-a39c-4eee-bde0-cee90e4c3400-client-ca" (OuterVolumeSpecName: "client-ca") pod "d2bf66be-a39c-4eee-bde0-cee90e4c3400" (UID: "d2bf66be-a39c-4eee-bde0-cee90e4c3400"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.548734 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6bf197ed-87f5-4089-a7f6-540c7e43118f-config" (OuterVolumeSpecName: "config") pod "6bf197ed-87f5-4089-a7f6-540c7e43118f" (UID: "6bf197ed-87f5-4089-a7f6-540c7e43118f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.548951 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6bf197ed-87f5-4089-a7f6-540c7e43118f-client-ca" (OuterVolumeSpecName: "client-ca") pod "6bf197ed-87f5-4089-a7f6-540c7e43118f" (UID: "6bf197ed-87f5-4089-a7f6-540c7e43118f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.551949 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2bf66be-a39c-4eee-bde0-cee90e4c3400-kube-api-access-m8tgk" (OuterVolumeSpecName: "kube-api-access-m8tgk") pod "d2bf66be-a39c-4eee-bde0-cee90e4c3400" (UID: "d2bf66be-a39c-4eee-bde0-cee90e4c3400"). InnerVolumeSpecName "kube-api-access-m8tgk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.551989 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bf197ed-87f5-4089-a7f6-540c7e43118f-kube-api-access-hrl7m" (OuterVolumeSpecName: "kube-api-access-hrl7m") pod "6bf197ed-87f5-4089-a7f6-540c7e43118f" (UID: "6bf197ed-87f5-4089-a7f6-540c7e43118f"). InnerVolumeSpecName "kube-api-access-hrl7m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.552048 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2bf66be-a39c-4eee-bde0-cee90e4c3400-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "d2bf66be-a39c-4eee-bde0-cee90e4c3400" (UID: "d2bf66be-a39c-4eee-bde0-cee90e4c3400"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.555754 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bf197ed-87f5-4089-a7f6-540c7e43118f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6bf197ed-87f5-4089-a7f6-540c7e43118f" (UID: "6bf197ed-87f5-4089-a7f6-540c7e43118f"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.648452 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/322c5833-2f45-4dbd-8b7f-8c16c9bf0013-client-ca\") pod \"route-controller-manager-5598468cdf-m7646\" (UID: \"322c5833-2f45-4dbd-8b7f-8c16c9bf0013\") " pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.648540 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fktl\" (UniqueName: \"kubernetes.io/projected/322c5833-2f45-4dbd-8b7f-8c16c9bf0013-kube-api-access-2fktl\") pod \"route-controller-manager-5598468cdf-m7646\" (UID: \"322c5833-2f45-4dbd-8b7f-8c16c9bf0013\") " pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.648566 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/322c5833-2f45-4dbd-8b7f-8c16c9bf0013-config\") pod \"route-controller-manager-5598468cdf-m7646\" (UID: \"322c5833-2f45-4dbd-8b7f-8c16c9bf0013\") " pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.648902 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/322c5833-2f45-4dbd-8b7f-8c16c9bf0013-serving-cert\") pod \"route-controller-manager-5598468cdf-m7646\" (UID: \"322c5833-2f45-4dbd-8b7f-8c16c9bf0013\") " pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.649318 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/322c5833-2f45-4dbd-8b7f-8c16c9bf0013-client-ca\") pod \"route-controller-manager-5598468cdf-m7646\" (UID: \"322c5833-2f45-4dbd-8b7f-8c16c9bf0013\") " pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.649407 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m8tgk\" (UniqueName: \"kubernetes.io/projected/d2bf66be-a39c-4eee-bde0-cee90e4c3400-kube-api-access-m8tgk\") on node \"crc\" DevicePath \"\"" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.649425 4856 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6bf197ed-87f5-4089-a7f6-540c7e43118f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.649434 4856 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d2bf66be-a39c-4eee-bde0-cee90e4c3400-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.649443 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hrl7m\" (UniqueName: \"kubernetes.io/projected/6bf197ed-87f5-4089-a7f6-540c7e43118f-kube-api-access-hrl7m\") on node \"crc\" DevicePath \"\"" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.649451 4856 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d2bf66be-a39c-4eee-bde0-cee90e4c3400-proxy-ca-bundles\") on node \"crc\" 
DevicePath \"\"" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.649459 4856 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2bf66be-a39c-4eee-bde0-cee90e4c3400-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.649466 4856 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2bf66be-a39c-4eee-bde0-cee90e4c3400-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.649474 4856 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6bf197ed-87f5-4089-a7f6-540c7e43118f-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.649482 4856 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6bf197ed-87f5-4089-a7f6-540c7e43118f-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.649997 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/322c5833-2f45-4dbd-8b7f-8c16c9bf0013-config\") pod \"route-controller-manager-5598468cdf-m7646\" (UID: \"322c5833-2f45-4dbd-8b7f-8c16c9bf0013\") " pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.652395 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/322c5833-2f45-4dbd-8b7f-8c16c9bf0013-serving-cert\") pod \"route-controller-manager-5598468cdf-m7646\" (UID: \"322c5833-2f45-4dbd-8b7f-8c16c9bf0013\") " pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.663641 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fktl\" (UniqueName: \"kubernetes.io/projected/322c5833-2f45-4dbd-8b7f-8c16c9bf0013-kube-api-access-2fktl\") pod \"route-controller-manager-5598468cdf-m7646\" (UID: \"322c5833-2f45-4dbd-8b7f-8c16c9bf0013\") " pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646" Dec 02 00:11:32 crc kubenswrapper[4856]: I1202 00:11:32.806069 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646" Dec 02 00:11:33 crc kubenswrapper[4856]: I1202 00:11:33.021473 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646"] Dec 02 00:11:33 crc kubenswrapper[4856]: W1202 00:11:33.024575 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod322c5833_2f45_4dbd_8b7f_8c16c9bf0013.slice/crio-cdb68b361dc46f303c88b1afb9cba76a0592870b88b0032f00c1f7aa124b6c32 WatchSource:0}: Error finding container cdb68b361dc46f303c88b1afb9cba76a0592870b88b0032f00c1f7aa124b6c32: Status 404 returned error can't find the container with id cdb68b361dc46f303c88b1afb9cba76a0592870b88b0032f00c1f7aa124b6c32 Dec 02 00:11:33 crc kubenswrapper[4856]: I1202 00:11:33.088678 4856 generic.go:334] "Generic (PLEG): container finished" podID="d2bf66be-a39c-4eee-bde0-cee90e4c3400" containerID="9513feb43d021fef77424abcaefa8513525af17df76568dc4adbb3565d04c595" exitCode=0 Dec 02 00:11:33 crc kubenswrapper[4856]: I1202 00:11:33.088742 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" Dec 02 00:11:33 crc kubenswrapper[4856]: I1202 00:11:33.088817 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" event={"ID":"d2bf66be-a39c-4eee-bde0-cee90e4c3400","Type":"ContainerDied","Data":"9513feb43d021fef77424abcaefa8513525af17df76568dc4adbb3565d04c595"} Dec 02 00:11:33 crc kubenswrapper[4856]: I1202 00:11:33.088891 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq" event={"ID":"d2bf66be-a39c-4eee-bde0-cee90e4c3400","Type":"ContainerDied","Data":"c7b1a5578f628ce41ddaa57c03aa4400554108032c4e0e4771f89e0afc0ecee7"} Dec 02 00:11:33 crc kubenswrapper[4856]: I1202 00:11:33.088917 4856 scope.go:117] "RemoveContainer" containerID="9513feb43d021fef77424abcaefa8513525af17df76568dc4adbb3565d04c595" Dec 02 00:11:33 crc kubenswrapper[4856]: I1202 00:11:33.090091 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646" event={"ID":"322c5833-2f45-4dbd-8b7f-8c16c9bf0013","Type":"ContainerStarted","Data":"cdb68b361dc46f303c88b1afb9cba76a0592870b88b0032f00c1f7aa124b6c32"} Dec 02 00:11:33 crc kubenswrapper[4856]: I1202 00:11:33.094089 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-596dc78bdd-h2pxw_6bf197ed-87f5-4089-a7f6-540c7e43118f/route-controller-manager/0.log" Dec 02 00:11:33 crc kubenswrapper[4856]: I1202 00:11:33.094166 4856 generic.go:334] "Generic (PLEG): container finished" podID="6bf197ed-87f5-4089-a7f6-540c7e43118f" containerID="29d9cc970aeec3c27cfe4e01593efac3354e24742dab35ed807cdea24c29bdb8" exitCode=255 Dec 02 00:11:33 crc kubenswrapper[4856]: I1202 00:11:33.094226 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw" event={"ID":"6bf197ed-87f5-4089-a7f6-540c7e43118f","Type":"ContainerDied","Data":"29d9cc970aeec3c27cfe4e01593efac3354e24742dab35ed807cdea24c29bdb8"} Dec 02 00:11:33 crc kubenswrapper[4856]: I1202 00:11:33.094260 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw" event={"ID":"6bf197ed-87f5-4089-a7f6-540c7e43118f","Type":"ContainerDied","Data":"34af86372bbfa616b0eb75c0119c66f8121cf63b966405bed8e1754d15e363fa"} Dec 02 00:11:33 crc kubenswrapper[4856]: I1202 00:11:33.094349 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw" Dec 02 00:11:33 crc kubenswrapper[4856]: I1202 00:11:33.103142 4856 scope.go:117] "RemoveContainer" containerID="9513feb43d021fef77424abcaefa8513525af17df76568dc4adbb3565d04c595" Dec 02 00:11:33 crc kubenswrapper[4856]: E1202 00:11:33.105178 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9513feb43d021fef77424abcaefa8513525af17df76568dc4adbb3565d04c595\": container with ID starting with 9513feb43d021fef77424abcaefa8513525af17df76568dc4adbb3565d04c595 not found: ID does not exist" containerID="9513feb43d021fef77424abcaefa8513525af17df76568dc4adbb3565d04c595" Dec 02 00:11:33 crc kubenswrapper[4856]: I1202 00:11:33.105395 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9513feb43d021fef77424abcaefa8513525af17df76568dc4adbb3565d04c595"} err="failed to get container status \"9513feb43d021fef77424abcaefa8513525af17df76568dc4adbb3565d04c595\": rpc error: code = NotFound desc = could not find container \"9513feb43d021fef77424abcaefa8513525af17df76568dc4adbb3565d04c595\": container with ID starting with 9513feb43d021fef77424abcaefa8513525af17df76568dc4adbb3565d04c595 not found: ID does not exist" Dec 02 00:11:33 crc kubenswrapper[4856]: I1202 00:11:33.105551 4856 scope.go:117] "RemoveContainer" containerID="29d9cc970aeec3c27cfe4e01593efac3354e24742dab35ed807cdea24c29bdb8" Dec 02 00:11:33 crc kubenswrapper[4856]: I1202 00:11:33.123001 4856 scope.go:117] "RemoveContainer" containerID="29d9cc970aeec3c27cfe4e01593efac3354e24742dab35ed807cdea24c29bdb8" Dec 02 00:11:33 crc kubenswrapper[4856]: E1202 00:11:33.124813 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29d9cc970aeec3c27cfe4e01593efac3354e24742dab35ed807cdea24c29bdb8\": container with ID starting with 29d9cc970aeec3c27cfe4e01593efac3354e24742dab35ed807cdea24c29bdb8 not found: ID does not exist" containerID="29d9cc970aeec3c27cfe4e01593efac3354e24742dab35ed807cdea24c29bdb8" Dec 02 00:11:33 crc kubenswrapper[4856]: I1202 00:11:33.125013 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29d9cc970aeec3c27cfe4e01593efac3354e24742dab35ed807cdea24c29bdb8"} err="failed to get container status \"29d9cc970aeec3c27cfe4e01593efac3354e24742dab35ed807cdea24c29bdb8\": rpc error: code = NotFound desc = could not find container \"29d9cc970aeec3c27cfe4e01593efac3354e24742dab35ed807cdea24c29bdb8\": container with ID starting with 29d9cc970aeec3c27cfe4e01593efac3354e24742dab35ed807cdea24c29bdb8 not found: ID does not exist" Dec 02 00:11:33 crc kubenswrapper[4856]: I1202 00:11:33.138268 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq"] Dec 02 00:11:33 crc kubenswrapper[4856]: I1202 00:11:33.144413 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-8464b9b6cd-vgtmq"] Dec 02 00:11:33 crc kubenswrapper[4856]: I1202 
00:11:33.149299 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw"] Dec 02 00:11:33 crc kubenswrapper[4856]: I1202 00:11:33.153391 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-596dc78bdd-h2pxw"] Dec 02 00:11:33 crc kubenswrapper[4856]: I1202 00:11:33.261688 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6bf197ed-87f5-4089-a7f6-540c7e43118f" path="/var/lib/kubelet/pods/6bf197ed-87f5-4089-a7f6-540c7e43118f/volumes" Dec 02 00:11:33 crc kubenswrapper[4856]: I1202 00:11:33.262637 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2bf66be-a39c-4eee-bde0-cee90e4c3400" path="/var/lib/kubelet/pods/d2bf66be-a39c-4eee-bde0-cee90e4c3400/volumes" Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.104282 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646" event={"ID":"322c5833-2f45-4dbd-8b7f-8c16c9bf0013","Type":"ContainerStarted","Data":"2efce29f1c8597420feb1ec87c8090eb581e36090d116c008c2c44159db51ff9"} Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.104646 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646" Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.110381 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646" Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.126435 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646" podStartSLOduration=3.126413295 podStartE2EDuration="3.126413295s" podCreationTimestamp="2025-12-02 00:11:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:11:34.122069725 +0000 UTC m=+321.148437749" watchObservedRunningTime="2025-12-02 00:11:34.126413295 +0000 UTC m=+321.152781329" Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.768323 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5964cbcb45-sq7gs"] Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.769067 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.774206 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.774882 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.774898 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.775163 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.775345 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.780026 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.785652 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5964cbcb45-sq7gs"] Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.786480 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.889888 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3bf1920f-cd87-4bc9-a808-31f75559d97b-serving-cert\") pod \"controller-manager-5964cbcb45-sq7gs\" (UID: \"3bf1920f-cd87-4bc9-a808-31f75559d97b\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.890071 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3bf1920f-cd87-4bc9-a808-31f75559d97b-config\") pod \"controller-manager-5964cbcb45-sq7gs\" (UID: \"3bf1920f-cd87-4bc9-a808-31f75559d97b\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.890233 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3bf1920f-cd87-4bc9-a808-31f75559d97b-client-ca\") pod \"controller-manager-5964cbcb45-sq7gs\" (UID: \"3bf1920f-cd87-4bc9-a808-31f75559d97b\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.890328 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3bf1920f-cd87-4bc9-a808-31f75559d97b-proxy-ca-bundles\") pod \"controller-manager-5964cbcb45-sq7gs\" (UID: \"3bf1920f-cd87-4bc9-a808-31f75559d97b\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.890491 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ppgb\" (UniqueName: 
\"kubernetes.io/projected/3bf1920f-cd87-4bc9-a808-31f75559d97b-kube-api-access-7ppgb\") pod \"controller-manager-5964cbcb45-sq7gs\" (UID: \"3bf1920f-cd87-4bc9-a808-31f75559d97b\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.992088 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3bf1920f-cd87-4bc9-a808-31f75559d97b-config\") pod \"controller-manager-5964cbcb45-sq7gs\" (UID: \"3bf1920f-cd87-4bc9-a808-31f75559d97b\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.992157 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3bf1920f-cd87-4bc9-a808-31f75559d97b-client-ca\") pod \"controller-manager-5964cbcb45-sq7gs\" (UID: \"3bf1920f-cd87-4bc9-a808-31f75559d97b\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.992190 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3bf1920f-cd87-4bc9-a808-31f75559d97b-proxy-ca-bundles\") pod \"controller-manager-5964cbcb45-sq7gs\" (UID: \"3bf1920f-cd87-4bc9-a808-31f75559d97b\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.992241 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ppgb\" (UniqueName: \"kubernetes.io/projected/3bf1920f-cd87-4bc9-a808-31f75559d97b-kube-api-access-7ppgb\") pod \"controller-manager-5964cbcb45-sq7gs\" (UID: \"3bf1920f-cd87-4bc9-a808-31f75559d97b\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.992274 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3bf1920f-cd87-4bc9-a808-31f75559d97b-serving-cert\") pod \"controller-manager-5964cbcb45-sq7gs\" (UID: \"3bf1920f-cd87-4bc9-a808-31f75559d97b\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.993252 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3bf1920f-cd87-4bc9-a808-31f75559d97b-client-ca\") pod \"controller-manager-5964cbcb45-sq7gs\" (UID: \"3bf1920f-cd87-4bc9-a808-31f75559d97b\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.993349 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3bf1920f-cd87-4bc9-a808-31f75559d97b-proxy-ca-bundles\") pod \"controller-manager-5964cbcb45-sq7gs\" (UID: \"3bf1920f-cd87-4bc9-a808-31f75559d97b\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.993460 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3bf1920f-cd87-4bc9-a808-31f75559d97b-config\") pod \"controller-manager-5964cbcb45-sq7gs\" (UID: \"3bf1920f-cd87-4bc9-a808-31f75559d97b\") " 
pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" Dec 02 00:11:34 crc kubenswrapper[4856]: I1202 00:11:34.997508 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3bf1920f-cd87-4bc9-a808-31f75559d97b-serving-cert\") pod \"controller-manager-5964cbcb45-sq7gs\" (UID: \"3bf1920f-cd87-4bc9-a808-31f75559d97b\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" Dec 02 00:11:35 crc kubenswrapper[4856]: I1202 00:11:35.007238 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ppgb\" (UniqueName: \"kubernetes.io/projected/3bf1920f-cd87-4bc9-a808-31f75559d97b-kube-api-access-7ppgb\") pod \"controller-manager-5964cbcb45-sq7gs\" (UID: \"3bf1920f-cd87-4bc9-a808-31f75559d97b\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" Dec 02 00:11:35 crc kubenswrapper[4856]: I1202 00:11:35.088165 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" Dec 02 00:11:35 crc kubenswrapper[4856]: I1202 00:11:35.304377 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5964cbcb45-sq7gs"] Dec 02 00:11:35 crc kubenswrapper[4856]: W1202 00:11:35.310518 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3bf1920f_cd87_4bc9_a808_31f75559d97b.slice/crio-d5ba13d33746e723d6d29268975b4054fe15d84d8fc0202f3f7395d4669753b9 WatchSource:0}: Error finding container d5ba13d33746e723d6d29268975b4054fe15d84d8fc0202f3f7395d4669753b9: Status 404 returned error can't find the container with id d5ba13d33746e723d6d29268975b4054fe15d84d8fc0202f3f7395d4669753b9 Dec 02 00:11:36 crc kubenswrapper[4856]: I1202 00:11:36.122844 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" event={"ID":"3bf1920f-cd87-4bc9-a808-31f75559d97b","Type":"ContainerStarted","Data":"3a1ed6f1eb677a22471d67e7fabf0980e0d66f86c728b6866b4ca3b51df7e045"} Dec 02 00:11:36 crc kubenswrapper[4856]: I1202 00:11:36.123208 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" event={"ID":"3bf1920f-cd87-4bc9-a808-31f75559d97b","Type":"ContainerStarted","Data":"d5ba13d33746e723d6d29268975b4054fe15d84d8fc0202f3f7395d4669753b9"} Dec 02 00:11:36 crc kubenswrapper[4856]: I1202 00:11:36.123443 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" Dec 02 00:11:36 crc kubenswrapper[4856]: I1202 00:11:36.127700 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" Dec 02 00:11:36 crc kubenswrapper[4856]: I1202 00:11:36.139691 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" podStartSLOduration=5.13957646 podStartE2EDuration="5.13957646s" podCreationTimestamp="2025-12-02 00:11:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:11:36.136620135 +0000 UTC m=+323.162988139" watchObservedRunningTime="2025-12-02 00:11:36.13957646 +0000 UTC m=+323.165944464" Dec 02 00:12:05 crc 
kubenswrapper[4856]: I1202 00:12:05.062246 4856 patch_prober.go:28] interesting pod/machine-config-daemon-455ww container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 00:12:05 crc kubenswrapper[4856]: I1202 00:12:05.062622 4856 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 00:12:09 crc kubenswrapper[4856]: I1202 00:12:09.391980 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5964cbcb45-sq7gs"] Dec 02 00:12:09 crc kubenswrapper[4856]: I1202 00:12:09.392609 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" podUID="3bf1920f-cd87-4bc9-a808-31f75559d97b" containerName="controller-manager" containerID="cri-o://3a1ed6f1eb677a22471d67e7fabf0980e0d66f86c728b6866b4ca3b51df7e045" gracePeriod=30 Dec 02 00:12:09 crc kubenswrapper[4856]: I1202 00:12:09.867401 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" Dec 02 00:12:09 crc kubenswrapper[4856]: I1202 00:12:09.952356 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3bf1920f-cd87-4bc9-a808-31f75559d97b-serving-cert\") pod \"3bf1920f-cd87-4bc9-a808-31f75559d97b\" (UID: \"3bf1920f-cd87-4bc9-a808-31f75559d97b\") " Dec 02 00:12:09 crc kubenswrapper[4856]: I1202 00:12:09.952488 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3bf1920f-cd87-4bc9-a808-31f75559d97b-client-ca\") pod \"3bf1920f-cd87-4bc9-a808-31f75559d97b\" (UID: \"3bf1920f-cd87-4bc9-a808-31f75559d97b\") " Dec 02 00:12:09 crc kubenswrapper[4856]: I1202 00:12:09.952534 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3bf1920f-cd87-4bc9-a808-31f75559d97b-config\") pod \"3bf1920f-cd87-4bc9-a808-31f75559d97b\" (UID: \"3bf1920f-cd87-4bc9-a808-31f75559d97b\") " Dec 02 00:12:09 crc kubenswrapper[4856]: I1202 00:12:09.952564 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ppgb\" (UniqueName: \"kubernetes.io/projected/3bf1920f-cd87-4bc9-a808-31f75559d97b-kube-api-access-7ppgb\") pod \"3bf1920f-cd87-4bc9-a808-31f75559d97b\" (UID: \"3bf1920f-cd87-4bc9-a808-31f75559d97b\") " Dec 02 00:12:09 crc kubenswrapper[4856]: I1202 00:12:09.952604 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3bf1920f-cd87-4bc9-a808-31f75559d97b-proxy-ca-bundles\") pod \"3bf1920f-cd87-4bc9-a808-31f75559d97b\" (UID: \"3bf1920f-cd87-4bc9-a808-31f75559d97b\") " Dec 02 00:12:09 crc kubenswrapper[4856]: I1202 00:12:09.953399 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3bf1920f-cd87-4bc9-a808-31f75559d97b-client-ca" (OuterVolumeSpecName: "client-ca") pod 
"3bf1920f-cd87-4bc9-a808-31f75559d97b" (UID: "3bf1920f-cd87-4bc9-a808-31f75559d97b"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:12:09 crc kubenswrapper[4856]: I1202 00:12:09.953494 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3bf1920f-cd87-4bc9-a808-31f75559d97b-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "3bf1920f-cd87-4bc9-a808-31f75559d97b" (UID: "3bf1920f-cd87-4bc9-a808-31f75559d97b"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:12:09 crc kubenswrapper[4856]: I1202 00:12:09.953535 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3bf1920f-cd87-4bc9-a808-31f75559d97b-config" (OuterVolumeSpecName: "config") pod "3bf1920f-cd87-4bc9-a808-31f75559d97b" (UID: "3bf1920f-cd87-4bc9-a808-31f75559d97b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:12:09 crc kubenswrapper[4856]: I1202 00:12:09.957597 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3bf1920f-cd87-4bc9-a808-31f75559d97b-kube-api-access-7ppgb" (OuterVolumeSpecName: "kube-api-access-7ppgb") pod "3bf1920f-cd87-4bc9-a808-31f75559d97b" (UID: "3bf1920f-cd87-4bc9-a808-31f75559d97b"). InnerVolumeSpecName "kube-api-access-7ppgb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:12:09 crc kubenswrapper[4856]: I1202 00:12:09.966304 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3bf1920f-cd87-4bc9-a808-31f75559d97b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "3bf1920f-cd87-4bc9-a808-31f75559d97b" (UID: "3bf1920f-cd87-4bc9-a808-31f75559d97b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.054280 4856 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3bf1920f-cd87-4bc9-a808-31f75559d97b-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.054310 4856 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3bf1920f-cd87-4bc9-a808-31f75559d97b-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.054320 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ppgb\" (UniqueName: \"kubernetes.io/projected/3bf1920f-cd87-4bc9-a808-31f75559d97b-kube-api-access-7ppgb\") on node \"crc\" DevicePath \"\"" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.054331 4856 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3bf1920f-cd87-4bc9-a808-31f75559d97b-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.054341 4856 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3bf1920f-cd87-4bc9-a808-31f75559d97b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.327506 4856 generic.go:334] "Generic (PLEG): container finished" podID="3bf1920f-cd87-4bc9-a808-31f75559d97b" containerID="3a1ed6f1eb677a22471d67e7fabf0980e0d66f86c728b6866b4ca3b51df7e045" exitCode=0 Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.327570 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" event={"ID":"3bf1920f-cd87-4bc9-a808-31f75559d97b","Type":"ContainerDied","Data":"3a1ed6f1eb677a22471d67e7fabf0980e0d66f86c728b6866b4ca3b51df7e045"} Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.327646 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" event={"ID":"3bf1920f-cd87-4bc9-a808-31f75559d97b","Type":"ContainerDied","Data":"d5ba13d33746e723d6d29268975b4054fe15d84d8fc0202f3f7395d4669753b9"} Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.327680 4856 scope.go:117] "RemoveContainer" containerID="3a1ed6f1eb677a22471d67e7fabf0980e0d66f86c728b6866b4ca3b51df7e045" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.328197 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5964cbcb45-sq7gs" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.352650 4856 scope.go:117] "RemoveContainer" containerID="3a1ed6f1eb677a22471d67e7fabf0980e0d66f86c728b6866b4ca3b51df7e045" Dec 02 00:12:10 crc kubenswrapper[4856]: E1202 00:12:10.353096 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a1ed6f1eb677a22471d67e7fabf0980e0d66f86c728b6866b4ca3b51df7e045\": container with ID starting with 3a1ed6f1eb677a22471d67e7fabf0980e0d66f86c728b6866b4ca3b51df7e045 not found: ID does not exist" containerID="3a1ed6f1eb677a22471d67e7fabf0980e0d66f86c728b6866b4ca3b51df7e045" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.353169 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a1ed6f1eb677a22471d67e7fabf0980e0d66f86c728b6866b4ca3b51df7e045"} err="failed to get container status \"3a1ed6f1eb677a22471d67e7fabf0980e0d66f86c728b6866b4ca3b51df7e045\": rpc error: code = NotFound desc = could not find container \"3a1ed6f1eb677a22471d67e7fabf0980e0d66f86c728b6866b4ca3b51df7e045\": container with ID starting with 3a1ed6f1eb677a22471d67e7fabf0980e0d66f86c728b6866b4ca3b51df7e045 not found: ID does not exist" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.376321 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5964cbcb45-sq7gs"] Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.382048 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-5964cbcb45-sq7gs"] Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.803680 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-8464b9b6cd-dfd8h"] Dec 02 00:12:10 crc kubenswrapper[4856]: E1202 00:12:10.803999 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bf1920f-cd87-4bc9-a808-31f75559d97b" containerName="controller-manager" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.804019 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bf1920f-cd87-4bc9-a808-31f75559d97b" containerName="controller-manager" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.804185 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bf1920f-cd87-4bc9-a808-31f75559d97b" containerName="controller-manager" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.804886 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-8464b9b6cd-dfd8h" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.811231 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.811328 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.811953 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.813371 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.813749 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.816380 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.816783 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.836097 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-8464b9b6cd-dfd8h"] Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.863909 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e5af55a3-43a3-4d88-8fb9-f3ad022518f1-proxy-ca-bundles\") pod \"controller-manager-8464b9b6cd-dfd8h\" (UID: \"e5af55a3-43a3-4d88-8fb9-f3ad022518f1\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-dfd8h" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.863993 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e5af55a3-43a3-4d88-8fb9-f3ad022518f1-client-ca\") pod \"controller-manager-8464b9b6cd-dfd8h\" (UID: \"e5af55a3-43a3-4d88-8fb9-f3ad022518f1\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-dfd8h" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.864202 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpd77\" (UniqueName: \"kubernetes.io/projected/e5af55a3-43a3-4d88-8fb9-f3ad022518f1-kube-api-access-rpd77\") pod \"controller-manager-8464b9b6cd-dfd8h\" (UID: \"e5af55a3-43a3-4d88-8fb9-f3ad022518f1\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-dfd8h" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.864308 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e5af55a3-43a3-4d88-8fb9-f3ad022518f1-serving-cert\") pod \"controller-manager-8464b9b6cd-dfd8h\" (UID: \"e5af55a3-43a3-4d88-8fb9-f3ad022518f1\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-dfd8h" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.864333 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/e5af55a3-43a3-4d88-8fb9-f3ad022518f1-config\") pod \"controller-manager-8464b9b6cd-dfd8h\" (UID: \"e5af55a3-43a3-4d88-8fb9-f3ad022518f1\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-dfd8h" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.965747 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e5af55a3-43a3-4d88-8fb9-f3ad022518f1-client-ca\") pod \"controller-manager-8464b9b6cd-dfd8h\" (UID: \"e5af55a3-43a3-4d88-8fb9-f3ad022518f1\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-dfd8h" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.965792 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpd77\" (UniqueName: \"kubernetes.io/projected/e5af55a3-43a3-4d88-8fb9-f3ad022518f1-kube-api-access-rpd77\") pod \"controller-manager-8464b9b6cd-dfd8h\" (UID: \"e5af55a3-43a3-4d88-8fb9-f3ad022518f1\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-dfd8h" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.965844 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e5af55a3-43a3-4d88-8fb9-f3ad022518f1-serving-cert\") pod \"controller-manager-8464b9b6cd-dfd8h\" (UID: \"e5af55a3-43a3-4d88-8fb9-f3ad022518f1\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-dfd8h" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.965873 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5af55a3-43a3-4d88-8fb9-f3ad022518f1-config\") pod \"controller-manager-8464b9b6cd-dfd8h\" (UID: \"e5af55a3-43a3-4d88-8fb9-f3ad022518f1\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-dfd8h" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.966117 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e5af55a3-43a3-4d88-8fb9-f3ad022518f1-proxy-ca-bundles\") pod \"controller-manager-8464b9b6cd-dfd8h\" (UID: \"e5af55a3-43a3-4d88-8fb9-f3ad022518f1\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-dfd8h" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.967583 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5af55a3-43a3-4d88-8fb9-f3ad022518f1-config\") pod \"controller-manager-8464b9b6cd-dfd8h\" (UID: \"e5af55a3-43a3-4d88-8fb9-f3ad022518f1\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-dfd8h" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.967705 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e5af55a3-43a3-4d88-8fb9-f3ad022518f1-client-ca\") pod \"controller-manager-8464b9b6cd-dfd8h\" (UID: \"e5af55a3-43a3-4d88-8fb9-f3ad022518f1\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-dfd8h" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.968794 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e5af55a3-43a3-4d88-8fb9-f3ad022518f1-proxy-ca-bundles\") pod \"controller-manager-8464b9b6cd-dfd8h\" (UID: \"e5af55a3-43a3-4d88-8fb9-f3ad022518f1\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-dfd8h" 
Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.971345 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e5af55a3-43a3-4d88-8fb9-f3ad022518f1-serving-cert\") pod \"controller-manager-8464b9b6cd-dfd8h\" (UID: \"e5af55a3-43a3-4d88-8fb9-f3ad022518f1\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-dfd8h" Dec 02 00:12:10 crc kubenswrapper[4856]: I1202 00:12:10.988669 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpd77\" (UniqueName: \"kubernetes.io/projected/e5af55a3-43a3-4d88-8fb9-f3ad022518f1-kube-api-access-rpd77\") pod \"controller-manager-8464b9b6cd-dfd8h\" (UID: \"e5af55a3-43a3-4d88-8fb9-f3ad022518f1\") " pod="openshift-controller-manager/controller-manager-8464b9b6cd-dfd8h" Dec 02 00:12:11 crc kubenswrapper[4856]: I1202 00:12:11.139424 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-8464b9b6cd-dfd8h" Dec 02 00:12:11 crc kubenswrapper[4856]: I1202 00:12:11.269693 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3bf1920f-cd87-4bc9-a808-31f75559d97b" path="/var/lib/kubelet/pods/3bf1920f-cd87-4bc9-a808-31f75559d97b/volumes" Dec 02 00:12:11 crc kubenswrapper[4856]: I1202 00:12:11.353138 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-8464b9b6cd-dfd8h"] Dec 02 00:12:12 crc kubenswrapper[4856]: I1202 00:12:12.346388 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-8464b9b6cd-dfd8h" event={"ID":"e5af55a3-43a3-4d88-8fb9-f3ad022518f1","Type":"ContainerStarted","Data":"d4762d902c6e37abee3f6c4a0cebba9580049fcb94c1fbd823f422f906062c23"} Dec 02 00:12:12 crc kubenswrapper[4856]: I1202 00:12:12.346947 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-8464b9b6cd-dfd8h" Dec 02 00:12:12 crc kubenswrapper[4856]: I1202 00:12:12.346965 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-8464b9b6cd-dfd8h" event={"ID":"e5af55a3-43a3-4d88-8fb9-f3ad022518f1","Type":"ContainerStarted","Data":"d35aaff48891e5a0e12bc8318ea825d6ae9e2a65922569faff998de42a52d39a"} Dec 02 00:12:12 crc kubenswrapper[4856]: I1202 00:12:12.353142 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-8464b9b6cd-dfd8h" Dec 02 00:12:12 crc kubenswrapper[4856]: I1202 00:12:12.370553 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-8464b9b6cd-dfd8h" podStartSLOduration=3.370530548 podStartE2EDuration="3.370530548s" podCreationTimestamp="2025-12-02 00:12:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:12:12.363311247 +0000 UTC m=+359.389679291" watchObservedRunningTime="2025-12-02 00:12:12.370530548 +0000 UTC m=+359.396898592" Dec 02 00:12:29 crc kubenswrapper[4856]: I1202 00:12:29.410439 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646"] Dec 02 00:12:29 crc kubenswrapper[4856]: I1202 00:12:29.411802 4856 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646" podUID="322c5833-2f45-4dbd-8b7f-8c16c9bf0013" containerName="route-controller-manager" containerID="cri-o://2efce29f1c8597420feb1ec87c8090eb581e36090d116c008c2c44159db51ff9" gracePeriod=30 Dec 02 00:12:29 crc kubenswrapper[4856]: I1202 00:12:29.892890 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646" Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.014539 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/322c5833-2f45-4dbd-8b7f-8c16c9bf0013-client-ca\") pod \"322c5833-2f45-4dbd-8b7f-8c16c9bf0013\" (UID: \"322c5833-2f45-4dbd-8b7f-8c16c9bf0013\") " Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.014653 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fktl\" (UniqueName: \"kubernetes.io/projected/322c5833-2f45-4dbd-8b7f-8c16c9bf0013-kube-api-access-2fktl\") pod \"322c5833-2f45-4dbd-8b7f-8c16c9bf0013\" (UID: \"322c5833-2f45-4dbd-8b7f-8c16c9bf0013\") " Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.014694 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/322c5833-2f45-4dbd-8b7f-8c16c9bf0013-config\") pod \"322c5833-2f45-4dbd-8b7f-8c16c9bf0013\" (UID: \"322c5833-2f45-4dbd-8b7f-8c16c9bf0013\") " Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.014780 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/322c5833-2f45-4dbd-8b7f-8c16c9bf0013-serving-cert\") pod \"322c5833-2f45-4dbd-8b7f-8c16c9bf0013\" (UID: \"322c5833-2f45-4dbd-8b7f-8c16c9bf0013\") " Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.015476 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/322c5833-2f45-4dbd-8b7f-8c16c9bf0013-client-ca" (OuterVolumeSpecName: "client-ca") pod "322c5833-2f45-4dbd-8b7f-8c16c9bf0013" (UID: "322c5833-2f45-4dbd-8b7f-8c16c9bf0013"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.015529 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/322c5833-2f45-4dbd-8b7f-8c16c9bf0013-config" (OuterVolumeSpecName: "config") pod "322c5833-2f45-4dbd-8b7f-8c16c9bf0013" (UID: "322c5833-2f45-4dbd-8b7f-8c16c9bf0013"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.019742 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/322c5833-2f45-4dbd-8b7f-8c16c9bf0013-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "322c5833-2f45-4dbd-8b7f-8c16c9bf0013" (UID: "322c5833-2f45-4dbd-8b7f-8c16c9bf0013"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.021254 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/322c5833-2f45-4dbd-8b7f-8c16c9bf0013-kube-api-access-2fktl" (OuterVolumeSpecName: "kube-api-access-2fktl") pod "322c5833-2f45-4dbd-8b7f-8c16c9bf0013" (UID: "322c5833-2f45-4dbd-8b7f-8c16c9bf0013"). 
InnerVolumeSpecName "kube-api-access-2fktl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.116240 4856 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/322c5833-2f45-4dbd-8b7f-8c16c9bf0013-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.116277 4856 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/322c5833-2f45-4dbd-8b7f-8c16c9bf0013-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.116288 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fktl\" (UniqueName: \"kubernetes.io/projected/322c5833-2f45-4dbd-8b7f-8c16c9bf0013-kube-api-access-2fktl\") on node \"crc\" DevicePath \"\"" Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.116298 4856 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/322c5833-2f45-4dbd-8b7f-8c16c9bf0013-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.455761 4856 generic.go:334] "Generic (PLEG): container finished" podID="322c5833-2f45-4dbd-8b7f-8c16c9bf0013" containerID="2efce29f1c8597420feb1ec87c8090eb581e36090d116c008c2c44159db51ff9" exitCode=0 Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.455832 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646" Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.455855 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646" event={"ID":"322c5833-2f45-4dbd-8b7f-8c16c9bf0013","Type":"ContainerDied","Data":"2efce29f1c8597420feb1ec87c8090eb581e36090d116c008c2c44159db51ff9"} Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.456463 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646" event={"ID":"322c5833-2f45-4dbd-8b7f-8c16c9bf0013","Type":"ContainerDied","Data":"cdb68b361dc46f303c88b1afb9cba76a0592870b88b0032f00c1f7aa124b6c32"} Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.456544 4856 scope.go:117] "RemoveContainer" containerID="2efce29f1c8597420feb1ec87c8090eb581e36090d116c008c2c44159db51ff9" Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.478269 4856 scope.go:117] "RemoveContainer" containerID="2efce29f1c8597420feb1ec87c8090eb581e36090d116c008c2c44159db51ff9" Dec 02 00:12:30 crc kubenswrapper[4856]: E1202 00:12:30.478823 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2efce29f1c8597420feb1ec87c8090eb581e36090d116c008c2c44159db51ff9\": container with ID starting with 2efce29f1c8597420feb1ec87c8090eb581e36090d116c008c2c44159db51ff9 not found: ID does not exist" containerID="2efce29f1c8597420feb1ec87c8090eb581e36090d116c008c2c44159db51ff9" Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.478893 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2efce29f1c8597420feb1ec87c8090eb581e36090d116c008c2c44159db51ff9"} err="failed to get container status \"2efce29f1c8597420feb1ec87c8090eb581e36090d116c008c2c44159db51ff9\": rpc error: code = NotFound desc = could not find 
container \"2efce29f1c8597420feb1ec87c8090eb581e36090d116c008c2c44159db51ff9\": container with ID starting with 2efce29f1c8597420feb1ec87c8090eb581e36090d116c008c2c44159db51ff9 not found: ID does not exist" Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.500183 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646"] Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.506465 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5598468cdf-m7646"] Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.815498 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cd7fb44d6-zjxl7"] Dec 02 00:12:30 crc kubenswrapper[4856]: E1202 00:12:30.815827 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="322c5833-2f45-4dbd-8b7f-8c16c9bf0013" containerName="route-controller-manager" Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.815847 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="322c5833-2f45-4dbd-8b7f-8c16c9bf0013" containerName="route-controller-manager" Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.816054 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="322c5833-2f45-4dbd-8b7f-8c16c9bf0013" containerName="route-controller-manager" Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.816759 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-cd7fb44d6-zjxl7" Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.818351 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.819787 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.822967 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.825068 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.825109 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.825262 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.835689 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cd7fb44d6-zjxl7"] Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.927734 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74ae655b-7e4d-4e54-81a2-0a5153dbdf54-config\") pod \"route-controller-manager-cd7fb44d6-zjxl7\" (UID: \"74ae655b-7e4d-4e54-81a2-0a5153dbdf54\") " pod="openshift-route-controller-manager/route-controller-manager-cd7fb44d6-zjxl7" Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.927794 4856 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/74ae655b-7e4d-4e54-81a2-0a5153dbdf54-client-ca\") pod \"route-controller-manager-cd7fb44d6-zjxl7\" (UID: \"74ae655b-7e4d-4e54-81a2-0a5153dbdf54\") " pod="openshift-route-controller-manager/route-controller-manager-cd7fb44d6-zjxl7" Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.927837 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcsw2\" (UniqueName: \"kubernetes.io/projected/74ae655b-7e4d-4e54-81a2-0a5153dbdf54-kube-api-access-qcsw2\") pod \"route-controller-manager-cd7fb44d6-zjxl7\" (UID: \"74ae655b-7e4d-4e54-81a2-0a5153dbdf54\") " pod="openshift-route-controller-manager/route-controller-manager-cd7fb44d6-zjxl7" Dec 02 00:12:30 crc kubenswrapper[4856]: I1202 00:12:30.927889 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74ae655b-7e4d-4e54-81a2-0a5153dbdf54-serving-cert\") pod \"route-controller-manager-cd7fb44d6-zjxl7\" (UID: \"74ae655b-7e4d-4e54-81a2-0a5153dbdf54\") " pod="openshift-route-controller-manager/route-controller-manager-cd7fb44d6-zjxl7" Dec 02 00:12:31 crc kubenswrapper[4856]: I1202 00:12:31.028971 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74ae655b-7e4d-4e54-81a2-0a5153dbdf54-serving-cert\") pod \"route-controller-manager-cd7fb44d6-zjxl7\" (UID: \"74ae655b-7e4d-4e54-81a2-0a5153dbdf54\") " pod="openshift-route-controller-manager/route-controller-manager-cd7fb44d6-zjxl7" Dec 02 00:12:31 crc kubenswrapper[4856]: I1202 00:12:31.029042 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74ae655b-7e4d-4e54-81a2-0a5153dbdf54-config\") pod \"route-controller-manager-cd7fb44d6-zjxl7\" (UID: \"74ae655b-7e4d-4e54-81a2-0a5153dbdf54\") " pod="openshift-route-controller-manager/route-controller-manager-cd7fb44d6-zjxl7" Dec 02 00:12:31 crc kubenswrapper[4856]: I1202 00:12:31.029095 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/74ae655b-7e4d-4e54-81a2-0a5153dbdf54-client-ca\") pod \"route-controller-manager-cd7fb44d6-zjxl7\" (UID: \"74ae655b-7e4d-4e54-81a2-0a5153dbdf54\") " pod="openshift-route-controller-manager/route-controller-manager-cd7fb44d6-zjxl7" Dec 02 00:12:31 crc kubenswrapper[4856]: I1202 00:12:31.029141 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcsw2\" (UniqueName: \"kubernetes.io/projected/74ae655b-7e4d-4e54-81a2-0a5153dbdf54-kube-api-access-qcsw2\") pod \"route-controller-manager-cd7fb44d6-zjxl7\" (UID: \"74ae655b-7e4d-4e54-81a2-0a5153dbdf54\") " pod="openshift-route-controller-manager/route-controller-manager-cd7fb44d6-zjxl7" Dec 02 00:12:31 crc kubenswrapper[4856]: I1202 00:12:31.031818 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/74ae655b-7e4d-4e54-81a2-0a5153dbdf54-client-ca\") pod \"route-controller-manager-cd7fb44d6-zjxl7\" (UID: \"74ae655b-7e4d-4e54-81a2-0a5153dbdf54\") " pod="openshift-route-controller-manager/route-controller-manager-cd7fb44d6-zjxl7" Dec 02 00:12:31 crc kubenswrapper[4856]: I1202 00:12:31.032186 4856 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74ae655b-7e4d-4e54-81a2-0a5153dbdf54-config\") pod \"route-controller-manager-cd7fb44d6-zjxl7\" (UID: \"74ae655b-7e4d-4e54-81a2-0a5153dbdf54\") " pod="openshift-route-controller-manager/route-controller-manager-cd7fb44d6-zjxl7" Dec 02 00:12:31 crc kubenswrapper[4856]: I1202 00:12:31.051083 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74ae655b-7e4d-4e54-81a2-0a5153dbdf54-serving-cert\") pod \"route-controller-manager-cd7fb44d6-zjxl7\" (UID: \"74ae655b-7e4d-4e54-81a2-0a5153dbdf54\") " pod="openshift-route-controller-manager/route-controller-manager-cd7fb44d6-zjxl7" Dec 02 00:12:31 crc kubenswrapper[4856]: I1202 00:12:31.058957 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcsw2\" (UniqueName: \"kubernetes.io/projected/74ae655b-7e4d-4e54-81a2-0a5153dbdf54-kube-api-access-qcsw2\") pod \"route-controller-manager-cd7fb44d6-zjxl7\" (UID: \"74ae655b-7e4d-4e54-81a2-0a5153dbdf54\") " pod="openshift-route-controller-manager/route-controller-manager-cd7fb44d6-zjxl7" Dec 02 00:12:31 crc kubenswrapper[4856]: I1202 00:12:31.146738 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-cd7fb44d6-zjxl7" Dec 02 00:12:31 crc kubenswrapper[4856]: I1202 00:12:31.265712 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="322c5833-2f45-4dbd-8b7f-8c16c9bf0013" path="/var/lib/kubelet/pods/322c5833-2f45-4dbd-8b7f-8c16c9bf0013/volumes" Dec 02 00:12:31 crc kubenswrapper[4856]: I1202 00:12:31.590926 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cd7fb44d6-zjxl7"] Dec 02 00:12:32 crc kubenswrapper[4856]: I1202 00:12:32.467897 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-cd7fb44d6-zjxl7" event={"ID":"74ae655b-7e4d-4e54-81a2-0a5153dbdf54","Type":"ContainerStarted","Data":"ac0446103199ba8e2e8ddc1c99d36f4ab6829a51c78181d6ba30de1acc27db4c"} Dec 02 00:12:32 crc kubenswrapper[4856]: I1202 00:12:32.468225 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-cd7fb44d6-zjxl7" event={"ID":"74ae655b-7e4d-4e54-81a2-0a5153dbdf54","Type":"ContainerStarted","Data":"54da7dfea42df7f22c0fb1bc95dd375be217a3759660bee6d4501ab2e5f57dc2"} Dec 02 00:12:32 crc kubenswrapper[4856]: I1202 00:12:32.468245 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-cd7fb44d6-zjxl7" Dec 02 00:12:32 crc kubenswrapper[4856]: I1202 00:12:32.473736 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-cd7fb44d6-zjxl7" Dec 02 00:12:32 crc kubenswrapper[4856]: I1202 00:12:32.486870 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-cd7fb44d6-zjxl7" podStartSLOduration=3.486852866 podStartE2EDuration="3.486852866s" podCreationTimestamp="2025-12-02 00:12:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:12:32.485765819 +0000 UTC m=+379.512133823" watchObservedRunningTime="2025-12-02 00:12:32.486852866 +0000 UTC 
m=+379.513220870" Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.356236 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-zt4pb"] Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.357368 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.371561 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-zt4pb"] Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.475099 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9418c692-6e5d-4a19-bce9-f2279c5c788a-installation-pull-secrets\") pod \"image-registry-66df7c8f76-zt4pb\" (UID: \"9418c692-6e5d-4a19-bce9-f2279c5c788a\") " pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.475140 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9418c692-6e5d-4a19-bce9-f2279c5c788a-ca-trust-extracted\") pod \"image-registry-66df7c8f76-zt4pb\" (UID: \"9418c692-6e5d-4a19-bce9-f2279c5c788a\") " pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.475187 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9418c692-6e5d-4a19-bce9-f2279c5c788a-bound-sa-token\") pod \"image-registry-66df7c8f76-zt4pb\" (UID: \"9418c692-6e5d-4a19-bce9-f2279c5c788a\") " pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.475204 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s646r\" (UniqueName: \"kubernetes.io/projected/9418c692-6e5d-4a19-bce9-f2279c5c788a-kube-api-access-s646r\") pod \"image-registry-66df7c8f76-zt4pb\" (UID: \"9418c692-6e5d-4a19-bce9-f2279c5c788a\") " pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.475264 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9418c692-6e5d-4a19-bce9-f2279c5c788a-registry-tls\") pod \"image-registry-66df7c8f76-zt4pb\" (UID: \"9418c692-6e5d-4a19-bce9-f2279c5c788a\") " pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.475313 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9418c692-6e5d-4a19-bce9-f2279c5c788a-trusted-ca\") pod \"image-registry-66df7c8f76-zt4pb\" (UID: \"9418c692-6e5d-4a19-bce9-f2279c5c788a\") " pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.475335 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-zt4pb\" (UID: 
\"9418c692-6e5d-4a19-bce9-f2279c5c788a\") " pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.475362 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9418c692-6e5d-4a19-bce9-f2279c5c788a-registry-certificates\") pod \"image-registry-66df7c8f76-zt4pb\" (UID: \"9418c692-6e5d-4a19-bce9-f2279c5c788a\") " pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.504138 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-zt4pb\" (UID: \"9418c692-6e5d-4a19-bce9-f2279c5c788a\") " pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.576202 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9418c692-6e5d-4a19-bce9-f2279c5c788a-registry-tls\") pod \"image-registry-66df7c8f76-zt4pb\" (UID: \"9418c692-6e5d-4a19-bce9-f2279c5c788a\") " pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.576260 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9418c692-6e5d-4a19-bce9-f2279c5c788a-trusted-ca\") pod \"image-registry-66df7c8f76-zt4pb\" (UID: \"9418c692-6e5d-4a19-bce9-f2279c5c788a\") " pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.576294 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9418c692-6e5d-4a19-bce9-f2279c5c788a-registry-certificates\") pod \"image-registry-66df7c8f76-zt4pb\" (UID: \"9418c692-6e5d-4a19-bce9-f2279c5c788a\") " pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.576990 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9418c692-6e5d-4a19-bce9-f2279c5c788a-installation-pull-secrets\") pod \"image-registry-66df7c8f76-zt4pb\" (UID: \"9418c692-6e5d-4a19-bce9-f2279c5c788a\") " pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.577036 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9418c692-6e5d-4a19-bce9-f2279c5c788a-ca-trust-extracted\") pod \"image-registry-66df7c8f76-zt4pb\" (UID: \"9418c692-6e5d-4a19-bce9-f2279c5c788a\") " pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.577582 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9418c692-6e5d-4a19-bce9-f2279c5c788a-ca-trust-extracted\") pod \"image-registry-66df7c8f76-zt4pb\" (UID: \"9418c692-6e5d-4a19-bce9-f2279c5c788a\") " pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.577647 4856 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9418c692-6e5d-4a19-bce9-f2279c5c788a-bound-sa-token\") pod \"image-registry-66df7c8f76-zt4pb\" (UID: \"9418c692-6e5d-4a19-bce9-f2279c5c788a\") " pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.577690 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s646r\" (UniqueName: \"kubernetes.io/projected/9418c692-6e5d-4a19-bce9-f2279c5c788a-kube-api-access-s646r\") pod \"image-registry-66df7c8f76-zt4pb\" (UID: \"9418c692-6e5d-4a19-bce9-f2279c5c788a\") " pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.578026 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9418c692-6e5d-4a19-bce9-f2279c5c788a-trusted-ca\") pod \"image-registry-66df7c8f76-zt4pb\" (UID: \"9418c692-6e5d-4a19-bce9-f2279c5c788a\") " pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.579341 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9418c692-6e5d-4a19-bce9-f2279c5c788a-registry-certificates\") pod \"image-registry-66df7c8f76-zt4pb\" (UID: \"9418c692-6e5d-4a19-bce9-f2279c5c788a\") " pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.594198 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9418c692-6e5d-4a19-bce9-f2279c5c788a-installation-pull-secrets\") pod \"image-registry-66df7c8f76-zt4pb\" (UID: \"9418c692-6e5d-4a19-bce9-f2279c5c788a\") " pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.594248 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9418c692-6e5d-4a19-bce9-f2279c5c788a-registry-tls\") pod \"image-registry-66df7c8f76-zt4pb\" (UID: \"9418c692-6e5d-4a19-bce9-f2279c5c788a\") " pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.598423 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s646r\" (UniqueName: \"kubernetes.io/projected/9418c692-6e5d-4a19-bce9-f2279c5c788a-kube-api-access-s646r\") pod \"image-registry-66df7c8f76-zt4pb\" (UID: \"9418c692-6e5d-4a19-bce9-f2279c5c788a\") " pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.601143 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9418c692-6e5d-4a19-bce9-f2279c5c788a-bound-sa-token\") pod \"image-registry-66df7c8f76-zt4pb\" (UID: \"9418c692-6e5d-4a19-bce9-f2279c5c788a\") " pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:34 crc kubenswrapper[4856]: I1202 00:12:34.674636 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:35 crc kubenswrapper[4856]: I1202 00:12:35.061621 4856 patch_prober.go:28] interesting pod/machine-config-daemon-455ww container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 00:12:35 crc kubenswrapper[4856]: I1202 00:12:35.062058 4856 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 00:12:35 crc kubenswrapper[4856]: I1202 00:12:35.066395 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-zt4pb"] Dec 02 00:12:35 crc kubenswrapper[4856]: I1202 00:12:35.491725 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" event={"ID":"9418c692-6e5d-4a19-bce9-f2279c5c788a","Type":"ContainerStarted","Data":"d55ffc2511066fee88dc63deee0aaf88d74bd605e3eade0e91ef13c9d09acb2f"} Dec 02 00:12:35 crc kubenswrapper[4856]: I1202 00:12:35.492133 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" event={"ID":"9418c692-6e5d-4a19-bce9-f2279c5c788a","Type":"ContainerStarted","Data":"4333ac30fc4d7ded74efa833ffb03abcdaa25a78bf0e7f1b0772688a9cdea517"} Dec 02 00:12:35 crc kubenswrapper[4856]: I1202 00:12:35.492202 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:35 crc kubenswrapper[4856]: I1202 00:12:35.526438 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" podStartSLOduration=1.526417436 podStartE2EDuration="1.526417436s" podCreationTimestamp="2025-12-02 00:12:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:12:35.523333519 +0000 UTC m=+382.549701553" watchObservedRunningTime="2025-12-02 00:12:35.526417436 +0000 UTC m=+382.552785470" Dec 02 00:12:38 crc kubenswrapper[4856]: I1202 00:12:38.672968 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xzzbh"] Dec 02 00:12:38 crc kubenswrapper[4856]: I1202 00:12:38.674142 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-xzzbh" podUID="6efab283-f656-41a4-8996-4aee7986e931" containerName="registry-server" containerID="cri-o://8d33e77a6a1e76819515da235ef1d7ea14fd7f6390f595077115b5dda4c961fb" gracePeriod=30 Dec 02 00:12:38 crc kubenswrapper[4856]: I1202 00:12:38.681622 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-j2276"] Dec 02 00:12:38 crc kubenswrapper[4856]: I1202 00:12:38.683103 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-j2276" podUID="884edeee-5df9-4820-be36-38b7095706ef" containerName="registry-server" containerID="cri-o://7d212f5f90b1b1cfb63b41cebff940b88890236316f4d59523f287bb18177460" gracePeriod=30 
Dec 02 00:12:38 crc kubenswrapper[4856]: I1202 00:12:38.689496 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dllbd"] Dec 02 00:12:38 crc kubenswrapper[4856]: I1202 00:12:38.689826 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" podUID="7530965f-940a-4ac6-8dc6-be67d35a2f08" containerName="marketplace-operator" containerID="cri-o://7455ac68aaf7868e61ae214c1dfbe6d979100ed55d1e2ffc353ef55a6cdf7d7b" gracePeriod=30 Dec 02 00:12:38 crc kubenswrapper[4856]: I1202 00:12:38.699895 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ckmlx"] Dec 02 00:12:38 crc kubenswrapper[4856]: I1202 00:12:38.700202 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ckmlx" podUID="fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd" containerName="registry-server" containerID="cri-o://ccb996c6e68d72a25f9fa2643b361a0f142e51af582e661781a31e268fa20f8c" gracePeriod=30 Dec 02 00:12:38 crc kubenswrapper[4856]: I1202 00:12:38.709420 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5r988"] Dec 02 00:12:38 crc kubenswrapper[4856]: I1202 00:12:38.709711 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5r988" podUID="a1959db2-ac9e-4cfd-8afa-487e59d4177a" containerName="registry-server" containerID="cri-o://db6d48853372532e4db9a6df4614e78907a8c91d8572f2507db9e76bfb4111f0" gracePeriod=30 Dec 02 00:12:38 crc kubenswrapper[4856]: I1202 00:12:38.720688 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7r74k"] Dec 02 00:12:38 crc kubenswrapper[4856]: I1202 00:12:38.722090 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7r74k" Dec 02 00:12:38 crc kubenswrapper[4856]: I1202 00:12:38.736833 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7r74k"] Dec 02 00:12:38 crc kubenswrapper[4856]: I1202 00:12:38.843311 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgxck\" (UniqueName: \"kubernetes.io/projected/d33333ae-222b-4e6a-9c34-279172c4f292-kube-api-access-wgxck\") pod \"marketplace-operator-79b997595-7r74k\" (UID: \"d33333ae-222b-4e6a-9c34-279172c4f292\") " pod="openshift-marketplace/marketplace-operator-79b997595-7r74k" Dec 02 00:12:38 crc kubenswrapper[4856]: I1202 00:12:38.843372 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d33333ae-222b-4e6a-9c34-279172c4f292-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7r74k\" (UID: \"d33333ae-222b-4e6a-9c34-279172c4f292\") " pod="openshift-marketplace/marketplace-operator-79b997595-7r74k" Dec 02 00:12:38 crc kubenswrapper[4856]: I1202 00:12:38.843428 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d33333ae-222b-4e6a-9c34-279172c4f292-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7r74k\" (UID: \"d33333ae-222b-4e6a-9c34-279172c4f292\") " pod="openshift-marketplace/marketplace-operator-79b997595-7r74k" Dec 02 00:12:38 crc kubenswrapper[4856]: I1202 00:12:38.944750 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d33333ae-222b-4e6a-9c34-279172c4f292-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7r74k\" (UID: \"d33333ae-222b-4e6a-9c34-279172c4f292\") " pod="openshift-marketplace/marketplace-operator-79b997595-7r74k" Dec 02 00:12:38 crc kubenswrapper[4856]: I1202 00:12:38.944883 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgxck\" (UniqueName: \"kubernetes.io/projected/d33333ae-222b-4e6a-9c34-279172c4f292-kube-api-access-wgxck\") pod \"marketplace-operator-79b997595-7r74k\" (UID: \"d33333ae-222b-4e6a-9c34-279172c4f292\") " pod="openshift-marketplace/marketplace-operator-79b997595-7r74k" Dec 02 00:12:38 crc kubenswrapper[4856]: I1202 00:12:38.944912 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d33333ae-222b-4e6a-9c34-279172c4f292-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7r74k\" (UID: \"d33333ae-222b-4e6a-9c34-279172c4f292\") " pod="openshift-marketplace/marketplace-operator-79b997595-7r74k" Dec 02 00:12:38 crc kubenswrapper[4856]: I1202 00:12:38.947024 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d33333ae-222b-4e6a-9c34-279172c4f292-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7r74k\" (UID: \"d33333ae-222b-4e6a-9c34-279172c4f292\") " pod="openshift-marketplace/marketplace-operator-79b997595-7r74k" Dec 02 00:12:38 crc kubenswrapper[4856]: I1202 00:12:38.956129 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/d33333ae-222b-4e6a-9c34-279172c4f292-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7r74k\" (UID: \"d33333ae-222b-4e6a-9c34-279172c4f292\") " pod="openshift-marketplace/marketplace-operator-79b997595-7r74k" Dec 02 00:12:38 crc kubenswrapper[4856]: I1202 00:12:38.965435 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgxck\" (UniqueName: \"kubernetes.io/projected/d33333ae-222b-4e6a-9c34-279172c4f292-kube-api-access-wgxck\") pod \"marketplace-operator-79b997595-7r74k\" (UID: \"d33333ae-222b-4e6a-9c34-279172c4f292\") " pod="openshift-marketplace/marketplace-operator-79b997595-7r74k" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.057734 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7r74k" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.201180 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xzzbh" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.351480 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6efab283-f656-41a4-8996-4aee7986e931-utilities\") pod \"6efab283-f656-41a4-8996-4aee7986e931\" (UID: \"6efab283-f656-41a4-8996-4aee7986e931\") " Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.351671 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5l2km\" (UniqueName: \"kubernetes.io/projected/6efab283-f656-41a4-8996-4aee7986e931-kube-api-access-5l2km\") pod \"6efab283-f656-41a4-8996-4aee7986e931\" (UID: \"6efab283-f656-41a4-8996-4aee7986e931\") " Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.351744 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6efab283-f656-41a4-8996-4aee7986e931-catalog-content\") pod \"6efab283-f656-41a4-8996-4aee7986e931\" (UID: \"6efab283-f656-41a4-8996-4aee7986e931\") " Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.352329 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6efab283-f656-41a4-8996-4aee7986e931-utilities" (OuterVolumeSpecName: "utilities") pod "6efab283-f656-41a4-8996-4aee7986e931" (UID: "6efab283-f656-41a4-8996-4aee7986e931"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.359686 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6efab283-f656-41a4-8996-4aee7986e931-kube-api-access-5l2km" (OuterVolumeSpecName: "kube-api-access-5l2km") pod "6efab283-f656-41a4-8996-4aee7986e931" (UID: "6efab283-f656-41a4-8996-4aee7986e931"). InnerVolumeSpecName "kube-api-access-5l2km". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:12:39 crc kubenswrapper[4856]: E1202 00:12:39.375216 4856 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ccb996c6e68d72a25f9fa2643b361a0f142e51af582e661781a31e268fa20f8c is running failed: container process not found" containerID="ccb996c6e68d72a25f9fa2643b361a0f142e51af582e661781a31e268fa20f8c" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 00:12:39 crc kubenswrapper[4856]: E1202 00:12:39.376853 4856 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ccb996c6e68d72a25f9fa2643b361a0f142e51af582e661781a31e268fa20f8c is running failed: container process not found" containerID="ccb996c6e68d72a25f9fa2643b361a0f142e51af582e661781a31e268fa20f8c" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 00:12:39 crc kubenswrapper[4856]: E1202 00:12:39.377571 4856 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ccb996c6e68d72a25f9fa2643b361a0f142e51af582e661781a31e268fa20f8c is running failed: container process not found" containerID="ccb996c6e68d72a25f9fa2643b361a0f142e51af582e661781a31e268fa20f8c" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 00:12:39 crc kubenswrapper[4856]: E1202 00:12:39.377673 4856 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ccb996c6e68d72a25f9fa2643b361a0f142e51af582e661781a31e268fa20f8c is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-marketplace-ckmlx" podUID="fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd" containerName="registry-server" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.417960 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6efab283-f656-41a4-8996-4aee7986e931-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6efab283-f656-41a4-8996-4aee7986e931" (UID: "6efab283-f656-41a4-8996-4aee7986e931"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.453259 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5l2km\" (UniqueName: \"kubernetes.io/projected/6efab283-f656-41a4-8996-4aee7986e931-kube-api-access-5l2km\") on node \"crc\" DevicePath \"\"" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.453313 4856 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6efab283-f656-41a4-8996-4aee7986e931-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.453333 4856 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6efab283-f656-41a4-8996-4aee7986e931-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.483149 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-j2276" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.503412 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5r988" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.516609 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.516976 4856 generic.go:334] "Generic (PLEG): container finished" podID="fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd" containerID="ccb996c6e68d72a25f9fa2643b361a0f142e51af582e661781a31e268fa20f8c" exitCode=0 Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.517047 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ckmlx" event={"ID":"fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd","Type":"ContainerDied","Data":"ccb996c6e68d72a25f9fa2643b361a0f142e51af582e661781a31e268fa20f8c"} Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.517074 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ckmlx" event={"ID":"fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd","Type":"ContainerDied","Data":"7c72e0d7242a05bf64aabc7e14fecc92aead0cf3dd5d3dcd3e34a3e207a1b353"} Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.517087 4856 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c72e0d7242a05bf64aabc7e14fecc92aead0cf3dd5d3dcd3e34a3e207a1b353" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.519094 4856 generic.go:334] "Generic (PLEG): container finished" podID="6efab283-f656-41a4-8996-4aee7986e931" containerID="8d33e77a6a1e76819515da235ef1d7ea14fd7f6390f595077115b5dda4c961fb" exitCode=0 Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.519137 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xzzbh" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.519153 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xzzbh" event={"ID":"6efab283-f656-41a4-8996-4aee7986e931","Type":"ContainerDied","Data":"8d33e77a6a1e76819515da235ef1d7ea14fd7f6390f595077115b5dda4c961fb"} Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.519184 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xzzbh" event={"ID":"6efab283-f656-41a4-8996-4aee7986e931","Type":"ContainerDied","Data":"001af3b1fecbf945210762b6da8d55c261e19db67559d7f8b07910c030e15cf7"} Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.519208 4856 scope.go:117] "RemoveContainer" containerID="8d33e77a6a1e76819515da235ef1d7ea14fd7f6390f595077115b5dda4c961fb" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.523492 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ckmlx" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.525231 4856 generic.go:334] "Generic (PLEG): container finished" podID="a1959db2-ac9e-4cfd-8afa-487e59d4177a" containerID="db6d48853372532e4db9a6df4614e78907a8c91d8572f2507db9e76bfb4111f0" exitCode=0 Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.525294 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5r988" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.525321 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5r988" event={"ID":"a1959db2-ac9e-4cfd-8afa-487e59d4177a","Type":"ContainerDied","Data":"db6d48853372532e4db9a6df4614e78907a8c91d8572f2507db9e76bfb4111f0"} Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.525365 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5r988" event={"ID":"a1959db2-ac9e-4cfd-8afa-487e59d4177a","Type":"ContainerDied","Data":"5e4318ea2f524a6ae4504ed33c60e24617674495b7b2ed579d964e603abbd0b0"} Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.539415 4856 generic.go:334] "Generic (PLEG): container finished" podID="7530965f-940a-4ac6-8dc6-be67d35a2f08" containerID="7455ac68aaf7868e61ae214c1dfbe6d979100ed55d1e2ffc353ef55a6cdf7d7b" exitCode=0 Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.539502 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" event={"ID":"7530965f-940a-4ac6-8dc6-be67d35a2f08","Type":"ContainerDied","Data":"7455ac68aaf7868e61ae214c1dfbe6d979100ed55d1e2ffc353ef55a6cdf7d7b"} Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.539553 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" event={"ID":"7530965f-940a-4ac6-8dc6-be67d35a2f08","Type":"ContainerDied","Data":"d0f8f8885ba2ee64eb2cb6b36df0a3f4a753f36f83b64e41be5932d0c4bea550"} Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.539750 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-dllbd" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.542941 4856 scope.go:117] "RemoveContainer" containerID="c4403aaa286331005391fea6aed8a7273e0471a4d6d53a02c4116beefc45a732" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.547903 4856 generic.go:334] "Generic (PLEG): container finished" podID="884edeee-5df9-4820-be36-38b7095706ef" containerID="7d212f5f90b1b1cfb63b41cebff940b88890236316f4d59523f287bb18177460" exitCode=0 Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.547998 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j2276" event={"ID":"884edeee-5df9-4820-be36-38b7095706ef","Type":"ContainerDied","Data":"7d212f5f90b1b1cfb63b41cebff940b88890236316f4d59523f287bb18177460"} Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.548029 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j2276" event={"ID":"884edeee-5df9-4820-be36-38b7095706ef","Type":"ContainerDied","Data":"e6a985502718d50c06d3f686a9761ccd470842194856d767eaa3e1d662785bdf"} Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.548029 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-j2276" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.555488 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q98rj\" (UniqueName: \"kubernetes.io/projected/884edeee-5df9-4820-be36-38b7095706ef-kube-api-access-q98rj\") pod \"884edeee-5df9-4820-be36-38b7095706ef\" (UID: \"884edeee-5df9-4820-be36-38b7095706ef\") " Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.555548 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/884edeee-5df9-4820-be36-38b7095706ef-catalog-content\") pod \"884edeee-5df9-4820-be36-38b7095706ef\" (UID: \"884edeee-5df9-4820-be36-38b7095706ef\") " Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.555636 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/884edeee-5df9-4820-be36-38b7095706ef-utilities\") pod \"884edeee-5df9-4820-be36-38b7095706ef\" (UID: \"884edeee-5df9-4820-be36-38b7095706ef\") " Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.556682 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/884edeee-5df9-4820-be36-38b7095706ef-utilities" (OuterVolumeSpecName: "utilities") pod "884edeee-5df9-4820-be36-38b7095706ef" (UID: "884edeee-5df9-4820-be36-38b7095706ef"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.577768 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/884edeee-5df9-4820-be36-38b7095706ef-kube-api-access-q98rj" (OuterVolumeSpecName: "kube-api-access-q98rj") pod "884edeee-5df9-4820-be36-38b7095706ef" (UID: "884edeee-5df9-4820-be36-38b7095706ef"). InnerVolumeSpecName "kube-api-access-q98rj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.584960 4856 scope.go:117] "RemoveContainer" containerID="ccb3ec777c0fbea5b9be4ff2176db0b67e048195e93bdc58394bdbdc81862287" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.602381 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xzzbh"] Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.607238 4856 scope.go:117] "RemoveContainer" containerID="8d33e77a6a1e76819515da235ef1d7ea14fd7f6390f595077115b5dda4c961fb" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.607421 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-xzzbh"] Dec 02 00:12:39 crc kubenswrapper[4856]: E1202 00:12:39.608383 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d33e77a6a1e76819515da235ef1d7ea14fd7f6390f595077115b5dda4c961fb\": container with ID starting with 8d33e77a6a1e76819515da235ef1d7ea14fd7f6390f595077115b5dda4c961fb not found: ID does not exist" containerID="8d33e77a6a1e76819515da235ef1d7ea14fd7f6390f595077115b5dda4c961fb" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.608428 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d33e77a6a1e76819515da235ef1d7ea14fd7f6390f595077115b5dda4c961fb"} err="failed to get container status \"8d33e77a6a1e76819515da235ef1d7ea14fd7f6390f595077115b5dda4c961fb\": rpc error: code = NotFound desc = could not find container \"8d33e77a6a1e76819515da235ef1d7ea14fd7f6390f595077115b5dda4c961fb\": container with ID starting with 8d33e77a6a1e76819515da235ef1d7ea14fd7f6390f595077115b5dda4c961fb not found: ID does not exist" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.608445 4856 scope.go:117] "RemoveContainer" containerID="c4403aaa286331005391fea6aed8a7273e0471a4d6d53a02c4116beefc45a732" Dec 02 00:12:39 crc kubenswrapper[4856]: E1202 00:12:39.610658 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4403aaa286331005391fea6aed8a7273e0471a4d6d53a02c4116beefc45a732\": container with ID starting with c4403aaa286331005391fea6aed8a7273e0471a4d6d53a02c4116beefc45a732 not found: ID does not exist" containerID="c4403aaa286331005391fea6aed8a7273e0471a4d6d53a02c4116beefc45a732" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.610685 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4403aaa286331005391fea6aed8a7273e0471a4d6d53a02c4116beefc45a732"} err="failed to get container status \"c4403aaa286331005391fea6aed8a7273e0471a4d6d53a02c4116beefc45a732\": rpc error: code = NotFound desc = could not find container \"c4403aaa286331005391fea6aed8a7273e0471a4d6d53a02c4116beefc45a732\": container with ID starting with c4403aaa286331005391fea6aed8a7273e0471a4d6d53a02c4116beefc45a732 not found: ID does not exist" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.610700 4856 scope.go:117] "RemoveContainer" containerID="ccb3ec777c0fbea5b9be4ff2176db0b67e048195e93bdc58394bdbdc81862287" Dec 02 00:12:39 crc kubenswrapper[4856]: E1202 00:12:39.611748 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ccb3ec777c0fbea5b9be4ff2176db0b67e048195e93bdc58394bdbdc81862287\": container with ID starting with 
ccb3ec777c0fbea5b9be4ff2176db0b67e048195e93bdc58394bdbdc81862287 not found: ID does not exist" containerID="ccb3ec777c0fbea5b9be4ff2176db0b67e048195e93bdc58394bdbdc81862287" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.611789 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccb3ec777c0fbea5b9be4ff2176db0b67e048195e93bdc58394bdbdc81862287"} err="failed to get container status \"ccb3ec777c0fbea5b9be4ff2176db0b67e048195e93bdc58394bdbdc81862287\": rpc error: code = NotFound desc = could not find container \"ccb3ec777c0fbea5b9be4ff2176db0b67e048195e93bdc58394bdbdc81862287\": container with ID starting with ccb3ec777c0fbea5b9be4ff2176db0b67e048195e93bdc58394bdbdc81862287 not found: ID does not exist" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.611837 4856 scope.go:117] "RemoveContainer" containerID="db6d48853372532e4db9a6df4614e78907a8c91d8572f2507db9e76bfb4111f0" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.626566 4856 scope.go:117] "RemoveContainer" containerID="91cb2d7c3939a3ea578c06d095eefecdd61b4686b2b37939c12f7cdd1fb89dc4" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.627357 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/884edeee-5df9-4820-be36-38b7095706ef-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "884edeee-5df9-4820-be36-38b7095706ef" (UID: "884edeee-5df9-4820-be36-38b7095706ef"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.645947 4856 scope.go:117] "RemoveContainer" containerID="9afc5b5080253fbb7f37c141b754d9bdce9fa9f4fe49237e5f9232dc71c0d81c" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.655256 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7r74k"] Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.656815 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2f6g2\" (UniqueName: \"kubernetes.io/projected/fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd-kube-api-access-2f6g2\") pod \"fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd\" (UID: \"fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd\") " Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.656877 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wcb7j\" (UniqueName: \"kubernetes.io/projected/7530965f-940a-4ac6-8dc6-be67d35a2f08-kube-api-access-wcb7j\") pod \"7530965f-940a-4ac6-8dc6-be67d35a2f08\" (UID: \"7530965f-940a-4ac6-8dc6-be67d35a2f08\") " Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.656943 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd-catalog-content\") pod \"fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd\" (UID: \"fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd\") " Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.656979 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd-utilities\") pod \"fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd\" (UID: \"fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd\") " Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.657017 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/a1959db2-ac9e-4cfd-8afa-487e59d4177a-utilities\") pod \"a1959db2-ac9e-4cfd-8afa-487e59d4177a\" (UID: \"a1959db2-ac9e-4cfd-8afa-487e59d4177a\") " Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.657064 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7530965f-940a-4ac6-8dc6-be67d35a2f08-marketplace-trusted-ca\") pod \"7530965f-940a-4ac6-8dc6-be67d35a2f08\" (UID: \"7530965f-940a-4ac6-8dc6-be67d35a2f08\") " Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.657114 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lmw47\" (UniqueName: \"kubernetes.io/projected/a1959db2-ac9e-4cfd-8afa-487e59d4177a-kube-api-access-lmw47\") pod \"a1959db2-ac9e-4cfd-8afa-487e59d4177a\" (UID: \"a1959db2-ac9e-4cfd-8afa-487e59d4177a\") " Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.657154 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7530965f-940a-4ac6-8dc6-be67d35a2f08-marketplace-operator-metrics\") pod \"7530965f-940a-4ac6-8dc6-be67d35a2f08\" (UID: \"7530965f-940a-4ac6-8dc6-be67d35a2f08\") " Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.657183 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1959db2-ac9e-4cfd-8afa-487e59d4177a-catalog-content\") pod \"a1959db2-ac9e-4cfd-8afa-487e59d4177a\" (UID: \"a1959db2-ac9e-4cfd-8afa-487e59d4177a\") " Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.657432 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q98rj\" (UniqueName: \"kubernetes.io/projected/884edeee-5df9-4820-be36-38b7095706ef-kube-api-access-q98rj\") on node \"crc\" DevicePath \"\"" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.657454 4856 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/884edeee-5df9-4820-be36-38b7095706ef-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.657467 4856 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/884edeee-5df9-4820-be36-38b7095706ef-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.658570 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7530965f-940a-4ac6-8dc6-be67d35a2f08-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "7530965f-940a-4ac6-8dc6-be67d35a2f08" (UID: "7530965f-940a-4ac6-8dc6-be67d35a2f08"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.660569 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1959db2-ac9e-4cfd-8afa-487e59d4177a-utilities" (OuterVolumeSpecName: "utilities") pod "a1959db2-ac9e-4cfd-8afa-487e59d4177a" (UID: "a1959db2-ac9e-4cfd-8afa-487e59d4177a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.661564 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd-kube-api-access-2f6g2" (OuterVolumeSpecName: "kube-api-access-2f6g2") pod "fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd" (UID: "fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd"). InnerVolumeSpecName "kube-api-access-2f6g2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.661942 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7530965f-940a-4ac6-8dc6-be67d35a2f08-kube-api-access-wcb7j" (OuterVolumeSpecName: "kube-api-access-wcb7j") pod "7530965f-940a-4ac6-8dc6-be67d35a2f08" (UID: "7530965f-940a-4ac6-8dc6-be67d35a2f08"). InnerVolumeSpecName "kube-api-access-wcb7j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.662035 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd-utilities" (OuterVolumeSpecName: "utilities") pod "fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd" (UID: "fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.662532 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7530965f-940a-4ac6-8dc6-be67d35a2f08-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "7530965f-940a-4ac6-8dc6-be67d35a2f08" (UID: "7530965f-940a-4ac6-8dc6-be67d35a2f08"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.662622 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1959db2-ac9e-4cfd-8afa-487e59d4177a-kube-api-access-lmw47" (OuterVolumeSpecName: "kube-api-access-lmw47") pod "a1959db2-ac9e-4cfd-8afa-487e59d4177a" (UID: "a1959db2-ac9e-4cfd-8afa-487e59d4177a"). InnerVolumeSpecName "kube-api-access-lmw47". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.663441 4856 scope.go:117] "RemoveContainer" containerID="db6d48853372532e4db9a6df4614e78907a8c91d8572f2507db9e76bfb4111f0" Dec 02 00:12:39 crc kubenswrapper[4856]: E1202 00:12:39.664110 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db6d48853372532e4db9a6df4614e78907a8c91d8572f2507db9e76bfb4111f0\": container with ID starting with db6d48853372532e4db9a6df4614e78907a8c91d8572f2507db9e76bfb4111f0 not found: ID does not exist" containerID="db6d48853372532e4db9a6df4614e78907a8c91d8572f2507db9e76bfb4111f0" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.664226 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db6d48853372532e4db9a6df4614e78907a8c91d8572f2507db9e76bfb4111f0"} err="failed to get container status \"db6d48853372532e4db9a6df4614e78907a8c91d8572f2507db9e76bfb4111f0\": rpc error: code = NotFound desc = could not find container \"db6d48853372532e4db9a6df4614e78907a8c91d8572f2507db9e76bfb4111f0\": container with ID starting with db6d48853372532e4db9a6df4614e78907a8c91d8572f2507db9e76bfb4111f0 not found: ID does not exist" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.664327 4856 scope.go:117] "RemoveContainer" containerID="91cb2d7c3939a3ea578c06d095eefecdd61b4686b2b37939c12f7cdd1fb89dc4" Dec 02 00:12:39 crc kubenswrapper[4856]: E1202 00:12:39.664845 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91cb2d7c3939a3ea578c06d095eefecdd61b4686b2b37939c12f7cdd1fb89dc4\": container with ID starting with 91cb2d7c3939a3ea578c06d095eefecdd61b4686b2b37939c12f7cdd1fb89dc4 not found: ID does not exist" containerID="91cb2d7c3939a3ea578c06d095eefecdd61b4686b2b37939c12f7cdd1fb89dc4" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.664929 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91cb2d7c3939a3ea578c06d095eefecdd61b4686b2b37939c12f7cdd1fb89dc4"} err="failed to get container status \"91cb2d7c3939a3ea578c06d095eefecdd61b4686b2b37939c12f7cdd1fb89dc4\": rpc error: code = NotFound desc = could not find container \"91cb2d7c3939a3ea578c06d095eefecdd61b4686b2b37939c12f7cdd1fb89dc4\": container with ID starting with 91cb2d7c3939a3ea578c06d095eefecdd61b4686b2b37939c12f7cdd1fb89dc4 not found: ID does not exist" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.665093 4856 scope.go:117] "RemoveContainer" containerID="9afc5b5080253fbb7f37c141b754d9bdce9fa9f4fe49237e5f9232dc71c0d81c" Dec 02 00:12:39 crc kubenswrapper[4856]: E1202 00:12:39.665557 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9afc5b5080253fbb7f37c141b754d9bdce9fa9f4fe49237e5f9232dc71c0d81c\": container with ID starting with 9afc5b5080253fbb7f37c141b754d9bdce9fa9f4fe49237e5f9232dc71c0d81c not found: ID does not exist" containerID="9afc5b5080253fbb7f37c141b754d9bdce9fa9f4fe49237e5f9232dc71c0d81c" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.665583 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9afc5b5080253fbb7f37c141b754d9bdce9fa9f4fe49237e5f9232dc71c0d81c"} err="failed to get container status \"9afc5b5080253fbb7f37c141b754d9bdce9fa9f4fe49237e5f9232dc71c0d81c\": rpc error: code = NotFound desc = could not 
find container \"9afc5b5080253fbb7f37c141b754d9bdce9fa9f4fe49237e5f9232dc71c0d81c\": container with ID starting with 9afc5b5080253fbb7f37c141b754d9bdce9fa9f4fe49237e5f9232dc71c0d81c not found: ID does not exist" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.665616 4856 scope.go:117] "RemoveContainer" containerID="7455ac68aaf7868e61ae214c1dfbe6d979100ed55d1e2ffc353ef55a6cdf7d7b" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.678333 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd" (UID: "fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.679437 4856 scope.go:117] "RemoveContainer" containerID="7ecedc68031c6f576af14cc863dd7c89663fee5d5e039e47542d11f5c383272c" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.693925 4856 scope.go:117] "RemoveContainer" containerID="7455ac68aaf7868e61ae214c1dfbe6d979100ed55d1e2ffc353ef55a6cdf7d7b" Dec 02 00:12:39 crc kubenswrapper[4856]: E1202 00:12:39.694299 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7455ac68aaf7868e61ae214c1dfbe6d979100ed55d1e2ffc353ef55a6cdf7d7b\": container with ID starting with 7455ac68aaf7868e61ae214c1dfbe6d979100ed55d1e2ffc353ef55a6cdf7d7b not found: ID does not exist" containerID="7455ac68aaf7868e61ae214c1dfbe6d979100ed55d1e2ffc353ef55a6cdf7d7b" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.694344 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7455ac68aaf7868e61ae214c1dfbe6d979100ed55d1e2ffc353ef55a6cdf7d7b"} err="failed to get container status \"7455ac68aaf7868e61ae214c1dfbe6d979100ed55d1e2ffc353ef55a6cdf7d7b\": rpc error: code = NotFound desc = could not find container \"7455ac68aaf7868e61ae214c1dfbe6d979100ed55d1e2ffc353ef55a6cdf7d7b\": container with ID starting with 7455ac68aaf7868e61ae214c1dfbe6d979100ed55d1e2ffc353ef55a6cdf7d7b not found: ID does not exist" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.694368 4856 scope.go:117] "RemoveContainer" containerID="7ecedc68031c6f576af14cc863dd7c89663fee5d5e039e47542d11f5c383272c" Dec 02 00:12:39 crc kubenswrapper[4856]: E1202 00:12:39.694674 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ecedc68031c6f576af14cc863dd7c89663fee5d5e039e47542d11f5c383272c\": container with ID starting with 7ecedc68031c6f576af14cc863dd7c89663fee5d5e039e47542d11f5c383272c not found: ID does not exist" containerID="7ecedc68031c6f576af14cc863dd7c89663fee5d5e039e47542d11f5c383272c" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.694692 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ecedc68031c6f576af14cc863dd7c89663fee5d5e039e47542d11f5c383272c"} err="failed to get container status \"7ecedc68031c6f576af14cc863dd7c89663fee5d5e039e47542d11f5c383272c\": rpc error: code = NotFound desc = could not find container \"7ecedc68031c6f576af14cc863dd7c89663fee5d5e039e47542d11f5c383272c\": container with ID starting with 7ecedc68031c6f576af14cc863dd7c89663fee5d5e039e47542d11f5c383272c not found: ID does not exist" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.694705 
4856 scope.go:117] "RemoveContainer" containerID="7d212f5f90b1b1cfb63b41cebff940b88890236316f4d59523f287bb18177460" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.711237 4856 scope.go:117] "RemoveContainer" containerID="8c3363f3b01b87e6d71e2494eae4d82ba79e8980eb6db8d9ac4af13ebfed3367" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.744265 4856 scope.go:117] "RemoveContainer" containerID="28d9473a0ececf6e846e7428a9bb906ac221b663b077c276f33b137e266cde3c" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.758413 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2f6g2\" (UniqueName: \"kubernetes.io/projected/fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd-kube-api-access-2f6g2\") on node \"crc\" DevicePath \"\"" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.758539 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcb7j\" (UniqueName: \"kubernetes.io/projected/7530965f-940a-4ac6-8dc6-be67d35a2f08-kube-api-access-wcb7j\") on node \"crc\" DevicePath \"\"" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.759007 4856 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.759111 4856 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.759197 4856 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1959db2-ac9e-4cfd-8afa-487e59d4177a-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.759281 4856 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7530965f-940a-4ac6-8dc6-be67d35a2f08-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.759365 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lmw47\" (UniqueName: \"kubernetes.io/projected/a1959db2-ac9e-4cfd-8afa-487e59d4177a-kube-api-access-lmw47\") on node \"crc\" DevicePath \"\"" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.759447 4856 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7530965f-940a-4ac6-8dc6-be67d35a2f08-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.772361 4856 scope.go:117] "RemoveContainer" containerID="7d212f5f90b1b1cfb63b41cebff940b88890236316f4d59523f287bb18177460" Dec 02 00:12:39 crc kubenswrapper[4856]: E1202 00:12:39.772915 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d212f5f90b1b1cfb63b41cebff940b88890236316f4d59523f287bb18177460\": container with ID starting with 7d212f5f90b1b1cfb63b41cebff940b88890236316f4d59523f287bb18177460 not found: ID does not exist" containerID="7d212f5f90b1b1cfb63b41cebff940b88890236316f4d59523f287bb18177460" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.773008 4856 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"7d212f5f90b1b1cfb63b41cebff940b88890236316f4d59523f287bb18177460"} err="failed to get container status \"7d212f5f90b1b1cfb63b41cebff940b88890236316f4d59523f287bb18177460\": rpc error: code = NotFound desc = could not find container \"7d212f5f90b1b1cfb63b41cebff940b88890236316f4d59523f287bb18177460\": container with ID starting with 7d212f5f90b1b1cfb63b41cebff940b88890236316f4d59523f287bb18177460 not found: ID does not exist" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.773105 4856 scope.go:117] "RemoveContainer" containerID="8c3363f3b01b87e6d71e2494eae4d82ba79e8980eb6db8d9ac4af13ebfed3367" Dec 02 00:12:39 crc kubenswrapper[4856]: E1202 00:12:39.774057 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c3363f3b01b87e6d71e2494eae4d82ba79e8980eb6db8d9ac4af13ebfed3367\": container with ID starting with 8c3363f3b01b87e6d71e2494eae4d82ba79e8980eb6db8d9ac4af13ebfed3367 not found: ID does not exist" containerID="8c3363f3b01b87e6d71e2494eae4d82ba79e8980eb6db8d9ac4af13ebfed3367" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.774106 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c3363f3b01b87e6d71e2494eae4d82ba79e8980eb6db8d9ac4af13ebfed3367"} err="failed to get container status \"8c3363f3b01b87e6d71e2494eae4d82ba79e8980eb6db8d9ac4af13ebfed3367\": rpc error: code = NotFound desc = could not find container \"8c3363f3b01b87e6d71e2494eae4d82ba79e8980eb6db8d9ac4af13ebfed3367\": container with ID starting with 8c3363f3b01b87e6d71e2494eae4d82ba79e8980eb6db8d9ac4af13ebfed3367 not found: ID does not exist" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.774130 4856 scope.go:117] "RemoveContainer" containerID="28d9473a0ececf6e846e7428a9bb906ac221b663b077c276f33b137e266cde3c" Dec 02 00:12:39 crc kubenswrapper[4856]: E1202 00:12:39.774642 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28d9473a0ececf6e846e7428a9bb906ac221b663b077c276f33b137e266cde3c\": container with ID starting with 28d9473a0ececf6e846e7428a9bb906ac221b663b077c276f33b137e266cde3c not found: ID does not exist" containerID="28d9473a0ececf6e846e7428a9bb906ac221b663b077c276f33b137e266cde3c" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.774753 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28d9473a0ececf6e846e7428a9bb906ac221b663b077c276f33b137e266cde3c"} err="failed to get container status \"28d9473a0ececf6e846e7428a9bb906ac221b663b077c276f33b137e266cde3c\": rpc error: code = NotFound desc = could not find container \"28d9473a0ececf6e846e7428a9bb906ac221b663b077c276f33b137e266cde3c\": container with ID starting with 28d9473a0ececf6e846e7428a9bb906ac221b663b077c276f33b137e266cde3c not found: ID does not exist" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.787791 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1959db2-ac9e-4cfd-8afa-487e59d4177a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a1959db2-ac9e-4cfd-8afa-487e59d4177a" (UID: "a1959db2-ac9e-4cfd-8afa-487e59d4177a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.858873 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5r988"] Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.860578 4856 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1959db2-ac9e-4cfd-8afa-487e59d4177a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.864510 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5r988"] Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.884402 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dllbd"] Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.890369 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dllbd"] Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.896581 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-j2276"] Dec 02 00:12:39 crc kubenswrapper[4856]: I1202 00:12:39.900221 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-j2276"] Dec 02 00:12:40 crc kubenswrapper[4856]: I1202 00:12:40.569443 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7r74k" event={"ID":"d33333ae-222b-4e6a-9c34-279172c4f292","Type":"ContainerStarted","Data":"a21c88f15980a5fe0c472468b992a9f77ee4a154772d82a3bcde5069880e91b1"} Dec 02 00:12:40 crc kubenswrapper[4856]: I1202 00:12:40.569481 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7r74k" event={"ID":"d33333ae-222b-4e6a-9c34-279172c4f292","Type":"ContainerStarted","Data":"65f0c77acf725e53ee542438cf24917e800e68abf5f79ca87d55df04db9695e0"} Dec 02 00:12:40 crc kubenswrapper[4856]: I1202 00:12:40.569702 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-7r74k" Dec 02 00:12:40 crc kubenswrapper[4856]: I1202 00:12:40.575084 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-7r74k" Dec 02 00:12:40 crc kubenswrapper[4856]: I1202 00:12:40.575870 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ckmlx" Dec 02 00:12:40 crc kubenswrapper[4856]: I1202 00:12:40.594770 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-7r74k" podStartSLOduration=2.5947559030000003 podStartE2EDuration="2.594755903s" podCreationTimestamp="2025-12-02 00:12:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:12:40.591970533 +0000 UTC m=+387.618338537" watchObservedRunningTime="2025-12-02 00:12:40.594755903 +0000 UTC m=+387.621123897" Dec 02 00:12:40 crc kubenswrapper[4856]: I1202 00:12:40.634878 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ckmlx"] Dec 02 00:12:40 crc kubenswrapper[4856]: I1202 00:12:40.638196 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ckmlx"] Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.092066 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-bgmrg"] Dec 02 00:12:41 crc kubenswrapper[4856]: E1202 00:12:41.092351 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1959db2-ac9e-4cfd-8afa-487e59d4177a" containerName="extract-content" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.092367 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1959db2-ac9e-4cfd-8afa-487e59d4177a" containerName="extract-content" Dec 02 00:12:41 crc kubenswrapper[4856]: E1202 00:12:41.092382 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="884edeee-5df9-4820-be36-38b7095706ef" containerName="extract-content" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.092390 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="884edeee-5df9-4820-be36-38b7095706ef" containerName="extract-content" Dec 02 00:12:41 crc kubenswrapper[4856]: E1202 00:12:41.092403 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="884edeee-5df9-4820-be36-38b7095706ef" containerName="registry-server" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.092411 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="884edeee-5df9-4820-be36-38b7095706ef" containerName="registry-server" Dec 02 00:12:41 crc kubenswrapper[4856]: E1202 00:12:41.092420 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd" containerName="extract-utilities" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.092428 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd" containerName="extract-utilities" Dec 02 00:12:41 crc kubenswrapper[4856]: E1202 00:12:41.092440 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6efab283-f656-41a4-8996-4aee7986e931" containerName="extract-content" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.092447 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="6efab283-f656-41a4-8996-4aee7986e931" containerName="extract-content" Dec 02 00:12:41 crc kubenswrapper[4856]: E1202 00:12:41.092456 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6efab283-f656-41a4-8996-4aee7986e931" containerName="registry-server" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.092463 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="6efab283-f656-41a4-8996-4aee7986e931" 
containerName="registry-server" Dec 02 00:12:41 crc kubenswrapper[4856]: E1202 00:12:41.092472 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7530965f-940a-4ac6-8dc6-be67d35a2f08" containerName="marketplace-operator" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.092480 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="7530965f-940a-4ac6-8dc6-be67d35a2f08" containerName="marketplace-operator" Dec 02 00:12:41 crc kubenswrapper[4856]: E1202 00:12:41.092488 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd" containerName="registry-server" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.092495 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd" containerName="registry-server" Dec 02 00:12:41 crc kubenswrapper[4856]: E1202 00:12:41.092506 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="884edeee-5df9-4820-be36-38b7095706ef" containerName="extract-utilities" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.092512 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="884edeee-5df9-4820-be36-38b7095706ef" containerName="extract-utilities" Dec 02 00:12:41 crc kubenswrapper[4856]: E1202 00:12:41.092523 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7530965f-940a-4ac6-8dc6-be67d35a2f08" containerName="marketplace-operator" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.092529 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="7530965f-940a-4ac6-8dc6-be67d35a2f08" containerName="marketplace-operator" Dec 02 00:12:41 crc kubenswrapper[4856]: E1202 00:12:41.092540 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1959db2-ac9e-4cfd-8afa-487e59d4177a" containerName="extract-utilities" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.092548 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1959db2-ac9e-4cfd-8afa-487e59d4177a" containerName="extract-utilities" Dec 02 00:12:41 crc kubenswrapper[4856]: E1202 00:12:41.092558 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6efab283-f656-41a4-8996-4aee7986e931" containerName="extract-utilities" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.092566 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="6efab283-f656-41a4-8996-4aee7986e931" containerName="extract-utilities" Dec 02 00:12:41 crc kubenswrapper[4856]: E1202 00:12:41.092576 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd" containerName="extract-content" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.092583 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd" containerName="extract-content" Dec 02 00:12:41 crc kubenswrapper[4856]: E1202 00:12:41.092616 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1959db2-ac9e-4cfd-8afa-487e59d4177a" containerName="registry-server" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.092623 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1959db2-ac9e-4cfd-8afa-487e59d4177a" containerName="registry-server" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.092728 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="884edeee-5df9-4820-be36-38b7095706ef" containerName="registry-server" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.092740 4856 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="a1959db2-ac9e-4cfd-8afa-487e59d4177a" containerName="registry-server" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.092749 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="6efab283-f656-41a4-8996-4aee7986e931" containerName="registry-server" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.092760 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="7530965f-940a-4ac6-8dc6-be67d35a2f08" containerName="marketplace-operator" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.092771 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="7530965f-940a-4ac6-8dc6-be67d35a2f08" containerName="marketplace-operator" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.092780 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd" containerName="registry-server" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.093727 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bgmrg" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.099457 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.104223 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bgmrg"] Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.178844 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6gpv\" (UniqueName: \"kubernetes.io/projected/96d0fbac-8ecc-490b-a58e-0dfb4303f2b8-kube-api-access-s6gpv\") pod \"certified-operators-bgmrg\" (UID: \"96d0fbac-8ecc-490b-a58e-0dfb4303f2b8\") " pod="openshift-marketplace/certified-operators-bgmrg" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.179103 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96d0fbac-8ecc-490b-a58e-0dfb4303f2b8-catalog-content\") pod \"certified-operators-bgmrg\" (UID: \"96d0fbac-8ecc-490b-a58e-0dfb4303f2b8\") " pod="openshift-marketplace/certified-operators-bgmrg" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.179223 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96d0fbac-8ecc-490b-a58e-0dfb4303f2b8-utilities\") pod \"certified-operators-bgmrg\" (UID: \"96d0fbac-8ecc-490b-a58e-0dfb4303f2b8\") " pod="openshift-marketplace/certified-operators-bgmrg" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.259749 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6efab283-f656-41a4-8996-4aee7986e931" path="/var/lib/kubelet/pods/6efab283-f656-41a4-8996-4aee7986e931/volumes" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.260462 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7530965f-940a-4ac6-8dc6-be67d35a2f08" path="/var/lib/kubelet/pods/7530965f-940a-4ac6-8dc6-be67d35a2f08/volumes" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.261016 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="884edeee-5df9-4820-be36-38b7095706ef" path="/var/lib/kubelet/pods/884edeee-5df9-4820-be36-38b7095706ef/volumes" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.262273 4856 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1959db2-ac9e-4cfd-8afa-487e59d4177a" path="/var/lib/kubelet/pods/a1959db2-ac9e-4cfd-8afa-487e59d4177a/volumes" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.262939 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd" path="/var/lib/kubelet/pods/fcc97516-b9c2-49cd-b9f5-c6cabfe4a3bd/volumes" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.279835 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96d0fbac-8ecc-490b-a58e-0dfb4303f2b8-utilities\") pod \"certified-operators-bgmrg\" (UID: \"96d0fbac-8ecc-490b-a58e-0dfb4303f2b8\") " pod="openshift-marketplace/certified-operators-bgmrg" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.279904 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6gpv\" (UniqueName: \"kubernetes.io/projected/96d0fbac-8ecc-490b-a58e-0dfb4303f2b8-kube-api-access-s6gpv\") pod \"certified-operators-bgmrg\" (UID: \"96d0fbac-8ecc-490b-a58e-0dfb4303f2b8\") " pod="openshift-marketplace/certified-operators-bgmrg" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.279926 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96d0fbac-8ecc-490b-a58e-0dfb4303f2b8-catalog-content\") pod \"certified-operators-bgmrg\" (UID: \"96d0fbac-8ecc-490b-a58e-0dfb4303f2b8\") " pod="openshift-marketplace/certified-operators-bgmrg" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.280310 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96d0fbac-8ecc-490b-a58e-0dfb4303f2b8-catalog-content\") pod \"certified-operators-bgmrg\" (UID: \"96d0fbac-8ecc-490b-a58e-0dfb4303f2b8\") " pod="openshift-marketplace/certified-operators-bgmrg" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.280509 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96d0fbac-8ecc-490b-a58e-0dfb4303f2b8-utilities\") pod \"certified-operators-bgmrg\" (UID: \"96d0fbac-8ecc-490b-a58e-0dfb4303f2b8\") " pod="openshift-marketplace/certified-operators-bgmrg" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.298344 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6gpv\" (UniqueName: \"kubernetes.io/projected/96d0fbac-8ecc-490b-a58e-0dfb4303f2b8-kube-api-access-s6gpv\") pod \"certified-operators-bgmrg\" (UID: \"96d0fbac-8ecc-490b-a58e-0dfb4303f2b8\") " pod="openshift-marketplace/certified-operators-bgmrg" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.423807 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bgmrg" Dec 02 00:12:41 crc kubenswrapper[4856]: I1202 00:12:41.834077 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bgmrg"] Dec 02 00:12:41 crc kubenswrapper[4856]: W1202 00:12:41.840010 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod96d0fbac_8ecc_490b_a58e_0dfb4303f2b8.slice/crio-6b489e8e1938a4f4b6feae3c3debd175a2d2bea5731795c74a4ae731dcca0a5b WatchSource:0}: Error finding container 6b489e8e1938a4f4b6feae3c3debd175a2d2bea5731795c74a4ae731dcca0a5b: Status 404 returned error can't find the container with id 6b489e8e1938a4f4b6feae3c3debd175a2d2bea5731795c74a4ae731dcca0a5b Dec 02 00:12:42 crc kubenswrapper[4856]: I1202 00:12:42.589027 4856 generic.go:334] "Generic (PLEG): container finished" podID="96d0fbac-8ecc-490b-a58e-0dfb4303f2b8" containerID="37ec64b8b1b9840c60755d8b4e0349d17298812fb5143423f56ece2cb747b238" exitCode=0 Dec 02 00:12:42 crc kubenswrapper[4856]: I1202 00:12:42.589089 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bgmrg" event={"ID":"96d0fbac-8ecc-490b-a58e-0dfb4303f2b8","Type":"ContainerDied","Data":"37ec64b8b1b9840c60755d8b4e0349d17298812fb5143423f56ece2cb747b238"} Dec 02 00:12:42 crc kubenswrapper[4856]: I1202 00:12:42.589134 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bgmrg" event={"ID":"96d0fbac-8ecc-490b-a58e-0dfb4303f2b8","Type":"ContainerStarted","Data":"6b489e8e1938a4f4b6feae3c3debd175a2d2bea5731795c74a4ae731dcca0a5b"} Dec 02 00:12:42 crc kubenswrapper[4856]: I1202 00:12:42.892417 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lfjkr"] Dec 02 00:12:42 crc kubenswrapper[4856]: I1202 00:12:42.893581 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lfjkr" Dec 02 00:12:42 crc kubenswrapper[4856]: I1202 00:12:42.898739 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 02 00:12:42 crc kubenswrapper[4856]: I1202 00:12:42.921797 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lfjkr"] Dec 02 00:12:43 crc kubenswrapper[4856]: I1202 00:12:43.002890 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7pkn\" (UniqueName: \"kubernetes.io/projected/ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46-kube-api-access-h7pkn\") pod \"redhat-operators-lfjkr\" (UID: \"ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46\") " pod="openshift-marketplace/redhat-operators-lfjkr" Dec 02 00:12:43 crc kubenswrapper[4856]: I1202 00:12:43.002958 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46-catalog-content\") pod \"redhat-operators-lfjkr\" (UID: \"ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46\") " pod="openshift-marketplace/redhat-operators-lfjkr" Dec 02 00:12:43 crc kubenswrapper[4856]: I1202 00:12:43.002995 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46-utilities\") pod \"redhat-operators-lfjkr\" (UID: \"ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46\") " pod="openshift-marketplace/redhat-operators-lfjkr" Dec 02 00:12:43 crc kubenswrapper[4856]: I1202 00:12:43.103871 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46-utilities\") pod \"redhat-operators-lfjkr\" (UID: \"ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46\") " pod="openshift-marketplace/redhat-operators-lfjkr" Dec 02 00:12:43 crc kubenswrapper[4856]: I1202 00:12:43.103981 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7pkn\" (UniqueName: \"kubernetes.io/projected/ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46-kube-api-access-h7pkn\") pod \"redhat-operators-lfjkr\" (UID: \"ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46\") " pod="openshift-marketplace/redhat-operators-lfjkr" Dec 02 00:12:43 crc kubenswrapper[4856]: I1202 00:12:43.104007 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46-catalog-content\") pod \"redhat-operators-lfjkr\" (UID: \"ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46\") " pod="openshift-marketplace/redhat-operators-lfjkr" Dec 02 00:12:43 crc kubenswrapper[4856]: I1202 00:12:43.104410 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46-utilities\") pod \"redhat-operators-lfjkr\" (UID: \"ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46\") " pod="openshift-marketplace/redhat-operators-lfjkr" Dec 02 00:12:43 crc kubenswrapper[4856]: I1202 00:12:43.104426 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46-catalog-content\") pod \"redhat-operators-lfjkr\" (UID: \"ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46\") " 
pod="openshift-marketplace/redhat-operators-lfjkr" Dec 02 00:12:43 crc kubenswrapper[4856]: I1202 00:12:43.130183 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7pkn\" (UniqueName: \"kubernetes.io/projected/ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46-kube-api-access-h7pkn\") pod \"redhat-operators-lfjkr\" (UID: \"ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46\") " pod="openshift-marketplace/redhat-operators-lfjkr" Dec 02 00:12:43 crc kubenswrapper[4856]: I1202 00:12:43.209363 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lfjkr" Dec 02 00:12:43 crc kubenswrapper[4856]: I1202 00:12:43.485644 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zbmx9"] Dec 02 00:12:43 crc kubenswrapper[4856]: I1202 00:12:43.489677 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zbmx9" Dec 02 00:12:43 crc kubenswrapper[4856]: I1202 00:12:43.498414 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 02 00:12:43 crc kubenswrapper[4856]: I1202 00:12:43.500541 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zbmx9"] Dec 02 00:12:43 crc kubenswrapper[4856]: I1202 00:12:43.587070 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lfjkr"] Dec 02 00:12:43 crc kubenswrapper[4856]: W1202 00:12:43.592209 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podff3ec1c6_ecaf_4d6b_9fa4_ad20e2698f46.slice/crio-4997d2d81825add09ec054ea3b8398b56a7264bd1e15378e3fa870c3cb0d88ea WatchSource:0}: Error finding container 4997d2d81825add09ec054ea3b8398b56a7264bd1e15378e3fa870c3cb0d88ea: Status 404 returned error can't find the container with id 4997d2d81825add09ec054ea3b8398b56a7264bd1e15378e3fa870c3cb0d88ea Dec 02 00:12:43 crc kubenswrapper[4856]: I1202 00:12:43.610547 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3c49f31-0b5f-4eff-8d85-b4296f173280-catalog-content\") pod \"community-operators-zbmx9\" (UID: \"c3c49f31-0b5f-4eff-8d85-b4296f173280\") " pod="openshift-marketplace/community-operators-zbmx9" Dec 02 00:12:43 crc kubenswrapper[4856]: I1202 00:12:43.610653 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3c49f31-0b5f-4eff-8d85-b4296f173280-utilities\") pod \"community-operators-zbmx9\" (UID: \"c3c49f31-0b5f-4eff-8d85-b4296f173280\") " pod="openshift-marketplace/community-operators-zbmx9" Dec 02 00:12:43 crc kubenswrapper[4856]: I1202 00:12:43.610698 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbjh2\" (UniqueName: \"kubernetes.io/projected/c3c49f31-0b5f-4eff-8d85-b4296f173280-kube-api-access-dbjh2\") pod \"community-operators-zbmx9\" (UID: \"c3c49f31-0b5f-4eff-8d85-b4296f173280\") " pod="openshift-marketplace/community-operators-zbmx9" Dec 02 00:12:43 crc kubenswrapper[4856]: I1202 00:12:43.711655 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbjh2\" (UniqueName: 
\"kubernetes.io/projected/c3c49f31-0b5f-4eff-8d85-b4296f173280-kube-api-access-dbjh2\") pod \"community-operators-zbmx9\" (UID: \"c3c49f31-0b5f-4eff-8d85-b4296f173280\") " pod="openshift-marketplace/community-operators-zbmx9" Dec 02 00:12:43 crc kubenswrapper[4856]: I1202 00:12:43.711781 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3c49f31-0b5f-4eff-8d85-b4296f173280-catalog-content\") pod \"community-operators-zbmx9\" (UID: \"c3c49f31-0b5f-4eff-8d85-b4296f173280\") " pod="openshift-marketplace/community-operators-zbmx9" Dec 02 00:12:43 crc kubenswrapper[4856]: I1202 00:12:43.712337 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3c49f31-0b5f-4eff-8d85-b4296f173280-catalog-content\") pod \"community-operators-zbmx9\" (UID: \"c3c49f31-0b5f-4eff-8d85-b4296f173280\") " pod="openshift-marketplace/community-operators-zbmx9" Dec 02 00:12:43 crc kubenswrapper[4856]: I1202 00:12:43.712403 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3c49f31-0b5f-4eff-8d85-b4296f173280-utilities\") pod \"community-operators-zbmx9\" (UID: \"c3c49f31-0b5f-4eff-8d85-b4296f173280\") " pod="openshift-marketplace/community-operators-zbmx9" Dec 02 00:12:43 crc kubenswrapper[4856]: I1202 00:12:43.712652 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3c49f31-0b5f-4eff-8d85-b4296f173280-utilities\") pod \"community-operators-zbmx9\" (UID: \"c3c49f31-0b5f-4eff-8d85-b4296f173280\") " pod="openshift-marketplace/community-operators-zbmx9" Dec 02 00:12:43 crc kubenswrapper[4856]: I1202 00:12:43.731944 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbjh2\" (UniqueName: \"kubernetes.io/projected/c3c49f31-0b5f-4eff-8d85-b4296f173280-kube-api-access-dbjh2\") pod \"community-operators-zbmx9\" (UID: \"c3c49f31-0b5f-4eff-8d85-b4296f173280\") " pod="openshift-marketplace/community-operators-zbmx9" Dec 02 00:12:43 crc kubenswrapper[4856]: I1202 00:12:43.827328 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zbmx9" Dec 02 00:12:44 crc kubenswrapper[4856]: I1202 00:12:44.203445 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zbmx9"] Dec 02 00:12:44 crc kubenswrapper[4856]: W1202 00:12:44.205876 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc3c49f31_0b5f_4eff_8d85_b4296f173280.slice/crio-54635f9862390f322a73211840b41ef2fbf76ec164b88b9b676e318ff213ff63 WatchSource:0}: Error finding container 54635f9862390f322a73211840b41ef2fbf76ec164b88b9b676e318ff213ff63: Status 404 returned error can't find the container with id 54635f9862390f322a73211840b41ef2fbf76ec164b88b9b676e318ff213ff63 Dec 02 00:12:44 crc kubenswrapper[4856]: I1202 00:12:44.609247 4856 generic.go:334] "Generic (PLEG): container finished" podID="ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46" containerID="09cbba03f5a381416548f489a3212bf0dfec181bddfc74c52253cd4cd0ed831f" exitCode=0 Dec 02 00:12:44 crc kubenswrapper[4856]: I1202 00:12:44.609445 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lfjkr" event={"ID":"ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46","Type":"ContainerDied","Data":"09cbba03f5a381416548f489a3212bf0dfec181bddfc74c52253cd4cd0ed831f"} Dec 02 00:12:44 crc kubenswrapper[4856]: I1202 00:12:44.609698 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lfjkr" event={"ID":"ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46","Type":"ContainerStarted","Data":"4997d2d81825add09ec054ea3b8398b56a7264bd1e15378e3fa870c3cb0d88ea"} Dec 02 00:12:44 crc kubenswrapper[4856]: I1202 00:12:44.613709 4856 generic.go:334] "Generic (PLEG): container finished" podID="96d0fbac-8ecc-490b-a58e-0dfb4303f2b8" containerID="446982defabc051eaac96a4e2b2dab495a24876c55145ef869a692cd258fb1de" exitCode=0 Dec 02 00:12:44 crc kubenswrapper[4856]: I1202 00:12:44.613755 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bgmrg" event={"ID":"96d0fbac-8ecc-490b-a58e-0dfb4303f2b8","Type":"ContainerDied","Data":"446982defabc051eaac96a4e2b2dab495a24876c55145ef869a692cd258fb1de"} Dec 02 00:12:44 crc kubenswrapper[4856]: I1202 00:12:44.616399 4856 generic.go:334] "Generic (PLEG): container finished" podID="c3c49f31-0b5f-4eff-8d85-b4296f173280" containerID="995ae82dab3e349cccef2b0538eafa1348764147299638569d967ec5738e543d" exitCode=0 Dec 02 00:12:44 crc kubenswrapper[4856]: I1202 00:12:44.616446 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zbmx9" event={"ID":"c3c49f31-0b5f-4eff-8d85-b4296f173280","Type":"ContainerDied","Data":"995ae82dab3e349cccef2b0538eafa1348764147299638569d967ec5738e543d"} Dec 02 00:12:44 crc kubenswrapper[4856]: I1202 00:12:44.616477 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zbmx9" event={"ID":"c3c49f31-0b5f-4eff-8d85-b4296f173280","Type":"ContainerStarted","Data":"54635f9862390f322a73211840b41ef2fbf76ec164b88b9b676e318ff213ff63"} Dec 02 00:12:45 crc kubenswrapper[4856]: I1202 00:12:45.295662 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-sdbs6"] Dec 02 00:12:45 crc kubenswrapper[4856]: I1202 00:12:45.296988 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sdbs6" Dec 02 00:12:45 crc kubenswrapper[4856]: I1202 00:12:45.299535 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 02 00:12:45 crc kubenswrapper[4856]: I1202 00:12:45.308647 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sdbs6"] Dec 02 00:12:45 crc kubenswrapper[4856]: I1202 00:12:45.458942 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xk9sc\" (UniqueName: \"kubernetes.io/projected/f62c12f8-4128-49e1-b40e-186775fff67c-kube-api-access-xk9sc\") pod \"redhat-marketplace-sdbs6\" (UID: \"f62c12f8-4128-49e1-b40e-186775fff67c\") " pod="openshift-marketplace/redhat-marketplace-sdbs6" Dec 02 00:12:45 crc kubenswrapper[4856]: I1202 00:12:45.458986 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f62c12f8-4128-49e1-b40e-186775fff67c-utilities\") pod \"redhat-marketplace-sdbs6\" (UID: \"f62c12f8-4128-49e1-b40e-186775fff67c\") " pod="openshift-marketplace/redhat-marketplace-sdbs6" Dec 02 00:12:45 crc kubenswrapper[4856]: I1202 00:12:45.459246 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f62c12f8-4128-49e1-b40e-186775fff67c-catalog-content\") pod \"redhat-marketplace-sdbs6\" (UID: \"f62c12f8-4128-49e1-b40e-186775fff67c\") " pod="openshift-marketplace/redhat-marketplace-sdbs6" Dec 02 00:12:45 crc kubenswrapper[4856]: I1202 00:12:45.560542 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xk9sc\" (UniqueName: \"kubernetes.io/projected/f62c12f8-4128-49e1-b40e-186775fff67c-kube-api-access-xk9sc\") pod \"redhat-marketplace-sdbs6\" (UID: \"f62c12f8-4128-49e1-b40e-186775fff67c\") " pod="openshift-marketplace/redhat-marketplace-sdbs6" Dec 02 00:12:45 crc kubenswrapper[4856]: I1202 00:12:45.560605 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f62c12f8-4128-49e1-b40e-186775fff67c-utilities\") pod \"redhat-marketplace-sdbs6\" (UID: \"f62c12f8-4128-49e1-b40e-186775fff67c\") " pod="openshift-marketplace/redhat-marketplace-sdbs6" Dec 02 00:12:45 crc kubenswrapper[4856]: I1202 00:12:45.560652 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f62c12f8-4128-49e1-b40e-186775fff67c-catalog-content\") pod \"redhat-marketplace-sdbs6\" (UID: \"f62c12f8-4128-49e1-b40e-186775fff67c\") " pod="openshift-marketplace/redhat-marketplace-sdbs6" Dec 02 00:12:45 crc kubenswrapper[4856]: I1202 00:12:45.561104 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f62c12f8-4128-49e1-b40e-186775fff67c-catalog-content\") pod \"redhat-marketplace-sdbs6\" (UID: \"f62c12f8-4128-49e1-b40e-186775fff67c\") " pod="openshift-marketplace/redhat-marketplace-sdbs6" Dec 02 00:12:45 crc kubenswrapper[4856]: I1202 00:12:45.561377 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f62c12f8-4128-49e1-b40e-186775fff67c-utilities\") pod \"redhat-marketplace-sdbs6\" (UID: 
\"f62c12f8-4128-49e1-b40e-186775fff67c\") " pod="openshift-marketplace/redhat-marketplace-sdbs6" Dec 02 00:12:45 crc kubenswrapper[4856]: I1202 00:12:45.581512 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xk9sc\" (UniqueName: \"kubernetes.io/projected/f62c12f8-4128-49e1-b40e-186775fff67c-kube-api-access-xk9sc\") pod \"redhat-marketplace-sdbs6\" (UID: \"f62c12f8-4128-49e1-b40e-186775fff67c\") " pod="openshift-marketplace/redhat-marketplace-sdbs6" Dec 02 00:12:45 crc kubenswrapper[4856]: I1202 00:12:45.623257 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lfjkr" event={"ID":"ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46","Type":"ContainerStarted","Data":"3620ad5773ec9c09595b74ceeec80ec017ef69e4cf182c81145d416243a1313f"} Dec 02 00:12:45 crc kubenswrapper[4856]: I1202 00:12:45.626582 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bgmrg" event={"ID":"96d0fbac-8ecc-490b-a58e-0dfb4303f2b8","Type":"ContainerStarted","Data":"927a0c4a35eca95451f03b790de1c364e7bb644607230cab9ad7cb7d8858f7eb"} Dec 02 00:12:45 crc kubenswrapper[4856]: I1202 00:12:45.628450 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zbmx9" event={"ID":"c3c49f31-0b5f-4eff-8d85-b4296f173280","Type":"ContainerStarted","Data":"6d0db292e5db7ac662574ab75a0ebacf051e84becff19aaad7dcce26c810597f"} Dec 02 00:12:45 crc kubenswrapper[4856]: I1202 00:12:45.665869 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sdbs6" Dec 02 00:12:45 crc kubenswrapper[4856]: I1202 00:12:45.695076 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bgmrg" podStartSLOduration=2.10075312 podStartE2EDuration="4.695058889s" podCreationTimestamp="2025-12-02 00:12:41 +0000 UTC" firstStartedPulling="2025-12-02 00:12:42.590930212 +0000 UTC m=+389.617298216" lastFinishedPulling="2025-12-02 00:12:45.185235981 +0000 UTC m=+392.211603985" observedRunningTime="2025-12-02 00:12:45.689386684 +0000 UTC m=+392.715754688" watchObservedRunningTime="2025-12-02 00:12:45.695058889 +0000 UTC m=+392.721426893" Dec 02 00:12:46 crc kubenswrapper[4856]: I1202 00:12:46.144049 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sdbs6"] Dec 02 00:12:46 crc kubenswrapper[4856]: I1202 00:12:46.636226 4856 generic.go:334] "Generic (PLEG): container finished" podID="c3c49f31-0b5f-4eff-8d85-b4296f173280" containerID="6d0db292e5db7ac662574ab75a0ebacf051e84becff19aaad7dcce26c810597f" exitCode=0 Dec 02 00:12:46 crc kubenswrapper[4856]: I1202 00:12:46.636337 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zbmx9" event={"ID":"c3c49f31-0b5f-4eff-8d85-b4296f173280","Type":"ContainerDied","Data":"6d0db292e5db7ac662574ab75a0ebacf051e84becff19aaad7dcce26c810597f"} Dec 02 00:12:46 crc kubenswrapper[4856]: I1202 00:12:46.639657 4856 generic.go:334] "Generic (PLEG): container finished" podID="f62c12f8-4128-49e1-b40e-186775fff67c" containerID="169b58bd9208964da2d9780d5e277504dfdafb73d6c667a3e7a78fe1afa8e539" exitCode=0 Dec 02 00:12:46 crc kubenswrapper[4856]: I1202 00:12:46.639719 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sdbs6" 
event={"ID":"f62c12f8-4128-49e1-b40e-186775fff67c","Type":"ContainerDied","Data":"169b58bd9208964da2d9780d5e277504dfdafb73d6c667a3e7a78fe1afa8e539"} Dec 02 00:12:46 crc kubenswrapper[4856]: I1202 00:12:46.639745 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sdbs6" event={"ID":"f62c12f8-4128-49e1-b40e-186775fff67c","Type":"ContainerStarted","Data":"ba19ebf83a5d63ca0568772ce017e81651d9e15d896f2238698cd6ca8a0996c7"} Dec 02 00:12:46 crc kubenswrapper[4856]: I1202 00:12:46.642020 4856 generic.go:334] "Generic (PLEG): container finished" podID="ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46" containerID="3620ad5773ec9c09595b74ceeec80ec017ef69e4cf182c81145d416243a1313f" exitCode=0 Dec 02 00:12:46 crc kubenswrapper[4856]: I1202 00:12:46.642104 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lfjkr" event={"ID":"ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46","Type":"ContainerDied","Data":"3620ad5773ec9c09595b74ceeec80ec017ef69e4cf182c81145d416243a1313f"} Dec 02 00:12:48 crc kubenswrapper[4856]: I1202 00:12:48.653127 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zbmx9" event={"ID":"c3c49f31-0b5f-4eff-8d85-b4296f173280","Type":"ContainerStarted","Data":"768e8537ad3c092427f0bfd9cb16dd4a92456cd0e2f7eaa44d1af75d0826548e"} Dec 02 00:12:48 crc kubenswrapper[4856]: I1202 00:12:48.655186 4856 generic.go:334] "Generic (PLEG): container finished" podID="f62c12f8-4128-49e1-b40e-186775fff67c" containerID="1b941d539570fb4818b2908a9c33b51444a3a63c22513dcab3a5741e18978255" exitCode=0 Dec 02 00:12:48 crc kubenswrapper[4856]: I1202 00:12:48.655260 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sdbs6" event={"ID":"f62c12f8-4128-49e1-b40e-186775fff67c","Type":"ContainerDied","Data":"1b941d539570fb4818b2908a9c33b51444a3a63c22513dcab3a5741e18978255"} Dec 02 00:12:48 crc kubenswrapper[4856]: I1202 00:12:48.657364 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lfjkr" event={"ID":"ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46","Type":"ContainerStarted","Data":"8613a65a1914a17869de8835cf5b1f25a8d9a7028ea2aab972e1c18d848a0247"} Dec 02 00:12:48 crc kubenswrapper[4856]: I1202 00:12:48.672707 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zbmx9" podStartSLOduration=3.104144711 podStartE2EDuration="5.672692689s" podCreationTimestamp="2025-12-02 00:12:43 +0000 UTC" firstStartedPulling="2025-12-02 00:12:44.617356792 +0000 UTC m=+391.643724796" lastFinishedPulling="2025-12-02 00:12:47.18590477 +0000 UTC m=+394.212272774" observedRunningTime="2025-12-02 00:12:48.670831422 +0000 UTC m=+395.697199426" watchObservedRunningTime="2025-12-02 00:12:48.672692689 +0000 UTC m=+395.699060693" Dec 02 00:12:48 crc kubenswrapper[4856]: I1202 00:12:48.685288 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lfjkr" podStartSLOduration=4.219544894 podStartE2EDuration="6.685270886s" podCreationTimestamp="2025-12-02 00:12:42 +0000 UTC" firstStartedPulling="2025-12-02 00:12:44.612126584 +0000 UTC m=+391.638494588" lastFinishedPulling="2025-12-02 00:12:47.077852576 +0000 UTC m=+394.104220580" observedRunningTime="2025-12-02 00:12:48.68420314 +0000 UTC m=+395.710571144" watchObservedRunningTime="2025-12-02 00:12:48.685270886 +0000 UTC m=+395.711638890" Dec 02 00:12:50 crc 
kubenswrapper[4856]: I1202 00:12:50.670705 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sdbs6" event={"ID":"f62c12f8-4128-49e1-b40e-186775fff67c","Type":"ContainerStarted","Data":"aa36f31d284ce2cf4f1d07c474daf2996e7862fd874e7aafc9a291498dc8bb4b"} Dec 02 00:12:50 crc kubenswrapper[4856]: I1202 00:12:50.715206 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-sdbs6" podStartSLOduration=2.682133626 podStartE2EDuration="5.715178459s" podCreationTimestamp="2025-12-02 00:12:45 +0000 UTC" firstStartedPulling="2025-12-02 00:12:46.640813528 +0000 UTC m=+393.667181532" lastFinishedPulling="2025-12-02 00:12:49.673858361 +0000 UTC m=+396.700226365" observedRunningTime="2025-12-02 00:12:50.710653712 +0000 UTC m=+397.737021716" watchObservedRunningTime="2025-12-02 00:12:50.715178459 +0000 UTC m=+397.741546463" Dec 02 00:12:51 crc kubenswrapper[4856]: I1202 00:12:51.425214 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-bgmrg" Dec 02 00:12:51 crc kubenswrapper[4856]: I1202 00:12:51.425263 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-bgmrg" Dec 02 00:12:51 crc kubenswrapper[4856]: I1202 00:12:51.467332 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-bgmrg" Dec 02 00:12:51 crc kubenswrapper[4856]: I1202 00:12:51.716357 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-bgmrg" Dec 02 00:12:53 crc kubenswrapper[4856]: I1202 00:12:53.210241 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lfjkr" Dec 02 00:12:53 crc kubenswrapper[4856]: I1202 00:12:53.210558 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lfjkr" Dec 02 00:12:53 crc kubenswrapper[4856]: I1202 00:12:53.250006 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lfjkr" Dec 02 00:12:53 crc kubenswrapper[4856]: I1202 00:12:53.716472 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lfjkr" Dec 02 00:12:53 crc kubenswrapper[4856]: I1202 00:12:53.827906 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zbmx9" Dec 02 00:12:53 crc kubenswrapper[4856]: I1202 00:12:53.828220 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zbmx9" Dec 02 00:12:53 crc kubenswrapper[4856]: I1202 00:12:53.878896 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zbmx9" Dec 02 00:12:54 crc kubenswrapper[4856]: I1202 00:12:54.679180 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-zt4pb" Dec 02 00:12:54 crc kubenswrapper[4856]: I1202 00:12:54.725373 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2d7ss"] Dec 02 00:12:54 crc kubenswrapper[4856]: I1202 00:12:54.747769 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/community-operators-zbmx9" Dec 02 00:12:55 crc kubenswrapper[4856]: I1202 00:12:55.666904 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-sdbs6" Dec 02 00:12:55 crc kubenswrapper[4856]: I1202 00:12:55.668801 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-sdbs6" Dec 02 00:12:55 crc kubenswrapper[4856]: I1202 00:12:55.711488 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-sdbs6" Dec 02 00:12:55 crc kubenswrapper[4856]: I1202 00:12:55.775891 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-sdbs6" Dec 02 00:13:05 crc kubenswrapper[4856]: I1202 00:13:05.061497 4856 patch_prober.go:28] interesting pod/machine-config-daemon-455ww container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 00:13:05 crc kubenswrapper[4856]: I1202 00:13:05.062066 4856 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 00:13:05 crc kubenswrapper[4856]: I1202 00:13:05.062127 4856 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-455ww" Dec 02 00:13:05 crc kubenswrapper[4856]: I1202 00:13:05.062678 4856 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8d4f8a0eefd4b993c3e9c453142f8f243d7adda23056d06df873f94022546b0b"} pod="openshift-machine-config-operator/machine-config-daemon-455ww" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 00:13:05 crc kubenswrapper[4856]: I1202 00:13:05.062774 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" containerID="cri-o://8d4f8a0eefd4b993c3e9c453142f8f243d7adda23056d06df873f94022546b0b" gracePeriod=600 Dec 02 00:13:06 crc kubenswrapper[4856]: I1202 00:13:06.751175 4856 generic.go:334] "Generic (PLEG): container finished" podID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerID="8d4f8a0eefd4b993c3e9c453142f8f243d7adda23056d06df873f94022546b0b" exitCode=0 Dec 02 00:13:06 crc kubenswrapper[4856]: I1202 00:13:06.751244 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" event={"ID":"0271f00d-b420-4dee-aa8b-92d6fc294b2a","Type":"ContainerDied","Data":"8d4f8a0eefd4b993c3e9c453142f8f243d7adda23056d06df873f94022546b0b"} Dec 02 00:13:06 crc kubenswrapper[4856]: I1202 00:13:06.752392 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" event={"ID":"0271f00d-b420-4dee-aa8b-92d6fc294b2a","Type":"ContainerStarted","Data":"1a22a1073572a0b7416ea74ea0de4e4adcb24242e1feb1c293c982dbafd00b1f"} Dec 02 00:13:06 crc 
kubenswrapper[4856]: I1202 00:13:06.752432 4856 scope.go:117] "RemoveContainer" containerID="f75288f3b8d10a5c367cc47b5e8ef9dae7b29004908ce89fb85691341a004e82" Dec 02 00:13:19 crc kubenswrapper[4856]: I1202 00:13:19.759754 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" podUID="eebf27f1-cb8d-4ec3-8982-afb66867cda1" containerName="registry" containerID="cri-o://066d7a58c9e8376a1bdf6804ef473dd7a724ed2a438ba91c63796e94e3702953" gracePeriod=30 Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.188106 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.271823 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/eebf27f1-cb8d-4ec3-8982-afb66867cda1-registry-certificates\") pod \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.271923 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/eebf27f1-cb8d-4ec3-8982-afb66867cda1-installation-pull-secrets\") pod \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.271983 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/eebf27f1-cb8d-4ec3-8982-afb66867cda1-trusted-ca\") pod \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.272035 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/eebf27f1-cb8d-4ec3-8982-afb66867cda1-ca-trust-extracted\") pod \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.272146 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/eebf27f1-cb8d-4ec3-8982-afb66867cda1-registry-tls\") pod \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.272415 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.272529 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-btz9p\" (UniqueName: \"kubernetes.io/projected/eebf27f1-cb8d-4ec3-8982-afb66867cda1-kube-api-access-btz9p\") pod \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.272694 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/eebf27f1-cb8d-4ec3-8982-afb66867cda1-bound-sa-token\") pod 
\"eebf27f1-cb8d-4ec3-8982-afb66867cda1\" (UID: \"eebf27f1-cb8d-4ec3-8982-afb66867cda1\") " Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.273180 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eebf27f1-cb8d-4ec3-8982-afb66867cda1-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "eebf27f1-cb8d-4ec3-8982-afb66867cda1" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.276064 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eebf27f1-cb8d-4ec3-8982-afb66867cda1-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "eebf27f1-cb8d-4ec3-8982-afb66867cda1" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.280266 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eebf27f1-cb8d-4ec3-8982-afb66867cda1-kube-api-access-btz9p" (OuterVolumeSpecName: "kube-api-access-btz9p") pod "eebf27f1-cb8d-4ec3-8982-afb66867cda1" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1"). InnerVolumeSpecName "kube-api-access-btz9p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.280776 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eebf27f1-cb8d-4ec3-8982-afb66867cda1-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "eebf27f1-cb8d-4ec3-8982-afb66867cda1" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.281113 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eebf27f1-cb8d-4ec3-8982-afb66867cda1-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "eebf27f1-cb8d-4ec3-8982-afb66867cda1" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.281715 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eebf27f1-cb8d-4ec3-8982-afb66867cda1-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "eebf27f1-cb8d-4ec3-8982-afb66867cda1" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.288665 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "eebf27f1-cb8d-4ec3-8982-afb66867cda1" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.295782 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eebf27f1-cb8d-4ec3-8982-afb66867cda1-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "eebf27f1-cb8d-4ec3-8982-afb66867cda1" (UID: "eebf27f1-cb8d-4ec3-8982-afb66867cda1"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.374443 4856 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/eebf27f1-cb8d-4ec3-8982-afb66867cda1-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.374518 4856 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/eebf27f1-cb8d-4ec3-8982-afb66867cda1-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.374542 4856 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/eebf27f1-cb8d-4ec3-8982-afb66867cda1-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.374560 4856 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/eebf27f1-cb8d-4ec3-8982-afb66867cda1-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.374578 4856 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/eebf27f1-cb8d-4ec3-8982-afb66867cda1-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.374622 4856 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/eebf27f1-cb8d-4ec3-8982-afb66867cda1-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.374640 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-btz9p\" (UniqueName: \"kubernetes.io/projected/eebf27f1-cb8d-4ec3-8982-afb66867cda1-kube-api-access-btz9p\") on node \"crc\" DevicePath \"\"" Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.856871 4856 generic.go:334] "Generic (PLEG): container finished" podID="eebf27f1-cb8d-4ec3-8982-afb66867cda1" containerID="066d7a58c9e8376a1bdf6804ef473dd7a724ed2a438ba91c63796e94e3702953" exitCode=0 Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.856958 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.856991 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" event={"ID":"eebf27f1-cb8d-4ec3-8982-afb66867cda1","Type":"ContainerDied","Data":"066d7a58c9e8376a1bdf6804ef473dd7a724ed2a438ba91c63796e94e3702953"} Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.857570 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2d7ss" event={"ID":"eebf27f1-cb8d-4ec3-8982-afb66867cda1","Type":"ContainerDied","Data":"e3e49a057036e0d634c1ec523485975d3576a57c2813185f10f75e43a44f085c"} Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.857643 4856 scope.go:117] "RemoveContainer" containerID="066d7a58c9e8376a1bdf6804ef473dd7a724ed2a438ba91c63796e94e3702953" Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.891764 4856 scope.go:117] "RemoveContainer" containerID="066d7a58c9e8376a1bdf6804ef473dd7a724ed2a438ba91c63796e94e3702953" Dec 02 00:13:20 crc kubenswrapper[4856]: E1202 00:13:20.897072 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"066d7a58c9e8376a1bdf6804ef473dd7a724ed2a438ba91c63796e94e3702953\": container with ID starting with 066d7a58c9e8376a1bdf6804ef473dd7a724ed2a438ba91c63796e94e3702953 not found: ID does not exist" containerID="066d7a58c9e8376a1bdf6804ef473dd7a724ed2a438ba91c63796e94e3702953" Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.897175 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"066d7a58c9e8376a1bdf6804ef473dd7a724ed2a438ba91c63796e94e3702953"} err="failed to get container status \"066d7a58c9e8376a1bdf6804ef473dd7a724ed2a438ba91c63796e94e3702953\": rpc error: code = NotFound desc = could not find container \"066d7a58c9e8376a1bdf6804ef473dd7a724ed2a438ba91c63796e94e3702953\": container with ID starting with 066d7a58c9e8376a1bdf6804ef473dd7a724ed2a438ba91c63796e94e3702953 not found: ID does not exist" Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.916281 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2d7ss"] Dec 02 00:13:20 crc kubenswrapper[4856]: I1202 00:13:20.926099 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2d7ss"] Dec 02 00:13:21 crc kubenswrapper[4856]: I1202 00:13:21.265400 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eebf27f1-cb8d-4ec3-8982-afb66867cda1" path="/var/lib/kubelet/pods/eebf27f1-cb8d-4ec3-8982-afb66867cda1/volumes" Dec 02 00:15:00 crc kubenswrapper[4856]: I1202 00:15:00.178874 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410575-2cp99"] Dec 02 00:15:00 crc kubenswrapper[4856]: E1202 00:15:00.179967 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eebf27f1-cb8d-4ec3-8982-afb66867cda1" containerName="registry" Dec 02 00:15:00 crc kubenswrapper[4856]: I1202 00:15:00.179994 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="eebf27f1-cb8d-4ec3-8982-afb66867cda1" containerName="registry" Dec 02 00:15:00 crc kubenswrapper[4856]: I1202 00:15:00.180276 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="eebf27f1-cb8d-4ec3-8982-afb66867cda1" containerName="registry" Dec 02 
00:15:00 crc kubenswrapper[4856]: I1202 00:15:00.181058 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410575-2cp99" Dec 02 00:15:00 crc kubenswrapper[4856]: I1202 00:15:00.190956 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 02 00:15:00 crc kubenswrapper[4856]: I1202 00:15:00.190975 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 02 00:15:00 crc kubenswrapper[4856]: I1202 00:15:00.201092 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410575-2cp99"] Dec 02 00:15:00 crc kubenswrapper[4856]: I1202 00:15:00.340733 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/46782c64-0d60-4ef2-bb6b-cb2b0f59c60a-config-volume\") pod \"collect-profiles-29410575-2cp99\" (UID: \"46782c64-0d60-4ef2-bb6b-cb2b0f59c60a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410575-2cp99" Dec 02 00:15:00 crc kubenswrapper[4856]: I1202 00:15:00.341163 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/46782c64-0d60-4ef2-bb6b-cb2b0f59c60a-secret-volume\") pod \"collect-profiles-29410575-2cp99\" (UID: \"46782c64-0d60-4ef2-bb6b-cb2b0f59c60a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410575-2cp99" Dec 02 00:15:00 crc kubenswrapper[4856]: I1202 00:15:00.341192 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nx8k\" (UniqueName: \"kubernetes.io/projected/46782c64-0d60-4ef2-bb6b-cb2b0f59c60a-kube-api-access-7nx8k\") pod \"collect-profiles-29410575-2cp99\" (UID: \"46782c64-0d60-4ef2-bb6b-cb2b0f59c60a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410575-2cp99" Dec 02 00:15:00 crc kubenswrapper[4856]: I1202 00:15:00.442709 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/46782c64-0d60-4ef2-bb6b-cb2b0f59c60a-config-volume\") pod \"collect-profiles-29410575-2cp99\" (UID: \"46782c64-0d60-4ef2-bb6b-cb2b0f59c60a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410575-2cp99" Dec 02 00:15:00 crc kubenswrapper[4856]: I1202 00:15:00.442762 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/46782c64-0d60-4ef2-bb6b-cb2b0f59c60a-secret-volume\") pod \"collect-profiles-29410575-2cp99\" (UID: \"46782c64-0d60-4ef2-bb6b-cb2b0f59c60a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410575-2cp99" Dec 02 00:15:00 crc kubenswrapper[4856]: I1202 00:15:00.442793 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nx8k\" (UniqueName: \"kubernetes.io/projected/46782c64-0d60-4ef2-bb6b-cb2b0f59c60a-kube-api-access-7nx8k\") pod \"collect-profiles-29410575-2cp99\" (UID: \"46782c64-0d60-4ef2-bb6b-cb2b0f59c60a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410575-2cp99" Dec 02 00:15:00 crc kubenswrapper[4856]: I1202 00:15:00.443941 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-volume\" (UniqueName: \"kubernetes.io/configmap/46782c64-0d60-4ef2-bb6b-cb2b0f59c60a-config-volume\") pod \"collect-profiles-29410575-2cp99\" (UID: \"46782c64-0d60-4ef2-bb6b-cb2b0f59c60a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410575-2cp99" Dec 02 00:15:00 crc kubenswrapper[4856]: I1202 00:15:00.450811 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/46782c64-0d60-4ef2-bb6b-cb2b0f59c60a-secret-volume\") pod \"collect-profiles-29410575-2cp99\" (UID: \"46782c64-0d60-4ef2-bb6b-cb2b0f59c60a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410575-2cp99" Dec 02 00:15:00 crc kubenswrapper[4856]: I1202 00:15:00.472089 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nx8k\" (UniqueName: \"kubernetes.io/projected/46782c64-0d60-4ef2-bb6b-cb2b0f59c60a-kube-api-access-7nx8k\") pod \"collect-profiles-29410575-2cp99\" (UID: \"46782c64-0d60-4ef2-bb6b-cb2b0f59c60a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410575-2cp99" Dec 02 00:15:00 crc kubenswrapper[4856]: I1202 00:15:00.514290 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410575-2cp99" Dec 02 00:15:00 crc kubenswrapper[4856]: I1202 00:15:00.977871 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410575-2cp99"] Dec 02 00:15:01 crc kubenswrapper[4856]: I1202 00:15:01.943324 4856 generic.go:334] "Generic (PLEG): container finished" podID="46782c64-0d60-4ef2-bb6b-cb2b0f59c60a" containerID="faaf7e6d809d3510063587cd315da26703af5289e7b61d593a266c15f4f1834a" exitCode=0 Dec 02 00:15:01 crc kubenswrapper[4856]: I1202 00:15:01.943395 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410575-2cp99" event={"ID":"46782c64-0d60-4ef2-bb6b-cb2b0f59c60a","Type":"ContainerDied","Data":"faaf7e6d809d3510063587cd315da26703af5289e7b61d593a266c15f4f1834a"} Dec 02 00:15:01 crc kubenswrapper[4856]: I1202 00:15:01.943618 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410575-2cp99" event={"ID":"46782c64-0d60-4ef2-bb6b-cb2b0f59c60a","Type":"ContainerStarted","Data":"d9087217cc41d3566374e9f40fb9f3a97a4d25e2b1535d0b32806ea5be6972c8"} Dec 02 00:15:03 crc kubenswrapper[4856]: I1202 00:15:03.190680 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410575-2cp99" Dec 02 00:15:03 crc kubenswrapper[4856]: I1202 00:15:03.379480 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/46782c64-0d60-4ef2-bb6b-cb2b0f59c60a-secret-volume\") pod \"46782c64-0d60-4ef2-bb6b-cb2b0f59c60a\" (UID: \"46782c64-0d60-4ef2-bb6b-cb2b0f59c60a\") " Dec 02 00:15:03 crc kubenswrapper[4856]: I1202 00:15:03.379527 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/46782c64-0d60-4ef2-bb6b-cb2b0f59c60a-config-volume\") pod \"46782c64-0d60-4ef2-bb6b-cb2b0f59c60a\" (UID: \"46782c64-0d60-4ef2-bb6b-cb2b0f59c60a\") " Dec 02 00:15:03 crc kubenswrapper[4856]: I1202 00:15:03.379624 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7nx8k\" (UniqueName: \"kubernetes.io/projected/46782c64-0d60-4ef2-bb6b-cb2b0f59c60a-kube-api-access-7nx8k\") pod \"46782c64-0d60-4ef2-bb6b-cb2b0f59c60a\" (UID: \"46782c64-0d60-4ef2-bb6b-cb2b0f59c60a\") " Dec 02 00:15:03 crc kubenswrapper[4856]: I1202 00:15:03.380234 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46782c64-0d60-4ef2-bb6b-cb2b0f59c60a-config-volume" (OuterVolumeSpecName: "config-volume") pod "46782c64-0d60-4ef2-bb6b-cb2b0f59c60a" (UID: "46782c64-0d60-4ef2-bb6b-cb2b0f59c60a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:15:03 crc kubenswrapper[4856]: I1202 00:15:03.388770 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46782c64-0d60-4ef2-bb6b-cb2b0f59c60a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "46782c64-0d60-4ef2-bb6b-cb2b0f59c60a" (UID: "46782c64-0d60-4ef2-bb6b-cb2b0f59c60a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:15:03 crc kubenswrapper[4856]: I1202 00:15:03.392815 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46782c64-0d60-4ef2-bb6b-cb2b0f59c60a-kube-api-access-7nx8k" (OuterVolumeSpecName: "kube-api-access-7nx8k") pod "46782c64-0d60-4ef2-bb6b-cb2b0f59c60a" (UID: "46782c64-0d60-4ef2-bb6b-cb2b0f59c60a"). InnerVolumeSpecName "kube-api-access-7nx8k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:15:03 crc kubenswrapper[4856]: I1202 00:15:03.481263 4856 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/46782c64-0d60-4ef2-bb6b-cb2b0f59c60a-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 02 00:15:03 crc kubenswrapper[4856]: I1202 00:15:03.481302 4856 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/46782c64-0d60-4ef2-bb6b-cb2b0f59c60a-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 00:15:03 crc kubenswrapper[4856]: I1202 00:15:03.481316 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7nx8k\" (UniqueName: \"kubernetes.io/projected/46782c64-0d60-4ef2-bb6b-cb2b0f59c60a-kube-api-access-7nx8k\") on node \"crc\" DevicePath \"\"" Dec 02 00:15:03 crc kubenswrapper[4856]: I1202 00:15:03.958426 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410575-2cp99" event={"ID":"46782c64-0d60-4ef2-bb6b-cb2b0f59c60a","Type":"ContainerDied","Data":"d9087217cc41d3566374e9f40fb9f3a97a4d25e2b1535d0b32806ea5be6972c8"} Dec 02 00:15:03 crc kubenswrapper[4856]: I1202 00:15:03.958467 4856 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d9087217cc41d3566374e9f40fb9f3a97a4d25e2b1535d0b32806ea5be6972c8" Dec 02 00:15:03 crc kubenswrapper[4856]: I1202 00:15:03.958571 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410575-2cp99" Dec 02 00:15:13 crc kubenswrapper[4856]: I1202 00:15:13.527267 4856 scope.go:117] "RemoveContainer" containerID="2c98168c88d70331f7635fae664e9e920694bed0fbeff98def733da40efa1420" Dec 02 00:15:35 crc kubenswrapper[4856]: I1202 00:15:35.062070 4856 patch_prober.go:28] interesting pod/machine-config-daemon-455ww container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 00:15:35 crc kubenswrapper[4856]: I1202 00:15:35.062520 4856 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 00:16:05 crc kubenswrapper[4856]: I1202 00:16:05.061506 4856 patch_prober.go:28] interesting pod/machine-config-daemon-455ww container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 00:16:05 crc kubenswrapper[4856]: I1202 00:16:05.062204 4856 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 00:16:13 crc kubenswrapper[4856]: I1202 00:16:13.566201 4856 scope.go:117] "RemoveContainer" containerID="ccb996c6e68d72a25f9fa2643b361a0f142e51af582e661781a31e268fa20f8c" Dec 02 00:16:13 crc kubenswrapper[4856]: I1202 
00:16:13.600624 4856 scope.go:117] "RemoveContainer" containerID="f20d2a2f1acfd61a38b5ab76e15aba17a4c99e06a5a1690c17b022b8ceb62d04" Dec 02 00:16:35 crc kubenswrapper[4856]: I1202 00:16:35.061700 4856 patch_prober.go:28] interesting pod/machine-config-daemon-455ww container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 00:16:35 crc kubenswrapper[4856]: I1202 00:16:35.062471 4856 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 00:16:35 crc kubenswrapper[4856]: I1202 00:16:35.062522 4856 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-455ww" Dec 02 00:16:35 crc kubenswrapper[4856]: I1202 00:16:35.063140 4856 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1a22a1073572a0b7416ea74ea0de4e4adcb24242e1feb1c293c982dbafd00b1f"} pod="openshift-machine-config-operator/machine-config-daemon-455ww" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 00:16:35 crc kubenswrapper[4856]: I1202 00:16:35.063183 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" containerID="cri-o://1a22a1073572a0b7416ea74ea0de4e4adcb24242e1feb1c293c982dbafd00b1f" gracePeriod=600 Dec 02 00:16:35 crc kubenswrapper[4856]: I1202 00:16:35.586915 4856 generic.go:334] "Generic (PLEG): container finished" podID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerID="1a22a1073572a0b7416ea74ea0de4e4adcb24242e1feb1c293c982dbafd00b1f" exitCode=0 Dec 02 00:16:35 crc kubenswrapper[4856]: I1202 00:16:35.587003 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" event={"ID":"0271f00d-b420-4dee-aa8b-92d6fc294b2a","Type":"ContainerDied","Data":"1a22a1073572a0b7416ea74ea0de4e4adcb24242e1feb1c293c982dbafd00b1f"} Dec 02 00:16:35 crc kubenswrapper[4856]: I1202 00:16:35.587280 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" event={"ID":"0271f00d-b420-4dee-aa8b-92d6fc294b2a","Type":"ContainerStarted","Data":"bc5c4932251b6c6119e7f7784c57be6ad9b00828d16d6b292535c8fefb264f0e"} Dec 02 00:16:35 crc kubenswrapper[4856]: I1202 00:16:35.587312 4856 scope.go:117] "RemoveContainer" containerID="8d4f8a0eefd4b993c3e9c453142f8f243d7adda23056d06df873f94022546b0b" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.232776 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-l5jg6"] Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.237839 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="ovn-controller" containerID="cri-o://5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e" 
gracePeriod=30 Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.238153 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="northd" containerID="cri-o://acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093" gracePeriod=30 Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.237946 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="kube-rbac-proxy-node" containerID="cri-o://ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b" gracePeriod=30 Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.237956 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="nbdb" containerID="cri-o://38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631" gracePeriod=30 Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.238101 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="sbdb" containerID="cri-o://f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683" gracePeriod=30 Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.238015 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="ovn-acl-logging" containerID="cri-o://31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476" gracePeriod=30 Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.241547 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67" gracePeriod=30 Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.291563 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="ovnkube-controller" containerID="cri-o://67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd" gracePeriod=30 Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.597411 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l5jg6_3051381c-49c8-4217-9831-013ca2931604/ovnkube-controller/3.log" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.601519 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l5jg6_3051381c-49c8-4217-9831-013ca2931604/ovn-acl-logging/0.log" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.602402 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l5jg6_3051381c-49c8-4217-9831-013ca2931604/ovn-controller/0.log" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.603437 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.646261 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-run-openvswitch\") pod \"3051381c-49c8-4217-9831-013ca2931604\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.646335 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-run-ovn\") pod \"3051381c-49c8-4217-9831-013ca2931604\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.646361 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-run-ovn-kubernetes\") pod \"3051381c-49c8-4217-9831-013ca2931604\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.646371 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "3051381c-49c8-4217-9831-013ca2931604" (UID: "3051381c-49c8-4217-9831-013ca2931604"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.646394 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3051381c-49c8-4217-9831-013ca2931604-env-overrides\") pod \"3051381c-49c8-4217-9831-013ca2931604\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.646454 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-node-log\") pod \"3051381c-49c8-4217-9831-013ca2931604\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.646481 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3051381c-49c8-4217-9831-013ca2931604-ovn-node-metrics-cert\") pod \"3051381c-49c8-4217-9831-013ca2931604\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.646530 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-var-lib-openvswitch\") pod \"3051381c-49c8-4217-9831-013ca2931604\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.646550 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-run-systemd\") pod \"3051381c-49c8-4217-9831-013ca2931604\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.646576 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" 
(UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-var-lib-cni-networks-ovn-kubernetes\") pod \"3051381c-49c8-4217-9831-013ca2931604\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.646554 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "3051381c-49c8-4217-9831-013ca2931604" (UID: "3051381c-49c8-4217-9831-013ca2931604"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.646649 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-cni-netd\") pod \"3051381c-49c8-4217-9831-013ca2931604\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.646678 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2g5ht\" (UniqueName: \"kubernetes.io/projected/3051381c-49c8-4217-9831-013ca2931604-kube-api-access-2g5ht\") pod \"3051381c-49c8-4217-9831-013ca2931604\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.646700 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-etc-openvswitch\") pod \"3051381c-49c8-4217-9831-013ca2931604\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.646583 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-node-log" (OuterVolumeSpecName: "node-log") pod "3051381c-49c8-4217-9831-013ca2931604" (UID: "3051381c-49c8-4217-9831-013ca2931604"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.646708 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "3051381c-49c8-4217-9831-013ca2931604" (UID: "3051381c-49c8-4217-9831-013ca2931604"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.646624 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "3051381c-49c8-4217-9831-013ca2931604" (UID: "3051381c-49c8-4217-9831-013ca2931604"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.646801 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "3051381c-49c8-4217-9831-013ca2931604" (UID: "3051381c-49c8-4217-9831-013ca2931604"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.646690 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "3051381c-49c8-4217-9831-013ca2931604" (UID: "3051381c-49c8-4217-9831-013ca2931604"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.646684 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "3051381c-49c8-4217-9831-013ca2931604" (UID: "3051381c-49c8-4217-9831-013ca2931604"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.646732 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3051381c-49c8-4217-9831-013ca2931604-ovnkube-config\") pod \"3051381c-49c8-4217-9831-013ca2931604\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.647075 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-slash\") pod \"3051381c-49c8-4217-9831-013ca2931604\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.647121 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-systemd-units\") pod \"3051381c-49c8-4217-9831-013ca2931604\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.647253 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-cni-bin\") pod \"3051381c-49c8-4217-9831-013ca2931604\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.647332 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-run-netns\") pod \"3051381c-49c8-4217-9831-013ca2931604\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.647379 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "3051381c-49c8-4217-9831-013ca2931604" (UID: "3051381c-49c8-4217-9831-013ca2931604"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.647398 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-kubelet\") pod \"3051381c-49c8-4217-9831-013ca2931604\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.647430 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "3051381c-49c8-4217-9831-013ca2931604" (UID: "3051381c-49c8-4217-9831-013ca2931604"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.647478 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-log-socket\") pod \"3051381c-49c8-4217-9831-013ca2931604\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.647447 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "3051381c-49c8-4217-9831-013ca2931604" (UID: "3051381c-49c8-4217-9831-013ca2931604"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.647540 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3051381c-49c8-4217-9831-013ca2931604-ovnkube-script-lib\") pod \"3051381c-49c8-4217-9831-013ca2931604\" (UID: \"3051381c-49c8-4217-9831-013ca2931604\") " Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.647468 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "3051381c-49c8-4217-9831-013ca2931604" (UID: "3051381c-49c8-4217-9831-013ca2931604"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.647520 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-log-socket" (OuterVolumeSpecName: "log-socket") pod "3051381c-49c8-4217-9831-013ca2931604" (UID: "3051381c-49c8-4217-9831-013ca2931604"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.647513 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-slash" (OuterVolumeSpecName: "host-slash") pod "3051381c-49c8-4217-9831-013ca2931604" (UID: "3051381c-49c8-4217-9831-013ca2931604"). InnerVolumeSpecName "host-slash". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.647926 4856 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.647954 4856 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.647967 4856 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-log-socket\") on node \"crc\" DevicePath \"\"" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.647980 4856 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.647993 4856 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.648007 4856 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.648020 4856 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-node-log\") on node \"crc\" DevicePath \"\"" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.648032 4856 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.648045 4856 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.648058 4856 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.648072 4856 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.648084 4856 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-slash\") on node \"crc\" DevicePath \"\"" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.648098 4856 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-systemd-units\") on node 
\"crc\" DevicePath \"\"" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.648110 4856 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.649032 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3051381c-49c8-4217-9831-013ca2931604-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "3051381c-49c8-4217-9831-013ca2931604" (UID: "3051381c-49c8-4217-9831-013ca2931604"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.649201 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3051381c-49c8-4217-9831-013ca2931604-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "3051381c-49c8-4217-9831-013ca2931604" (UID: "3051381c-49c8-4217-9831-013ca2931604"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.649382 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3051381c-49c8-4217-9831-013ca2931604-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "3051381c-49c8-4217-9831-013ca2931604" (UID: "3051381c-49c8-4217-9831-013ca2931604"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.656667 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3051381c-49c8-4217-9831-013ca2931604-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "3051381c-49c8-4217-9831-013ca2931604" (UID: "3051381c-49c8-4217-9831-013ca2931604"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.656840 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3051381c-49c8-4217-9831-013ca2931604-kube-api-access-2g5ht" (OuterVolumeSpecName: "kube-api-access-2g5ht") pod "3051381c-49c8-4217-9831-013ca2931604" (UID: "3051381c-49c8-4217-9831-013ca2931604"). InnerVolumeSpecName "kube-api-access-2g5ht". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.666642 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-45h42"] Dec 02 00:17:28 crc kubenswrapper[4856]: E1202 00:17:28.666926 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46782c64-0d60-4ef2-bb6b-cb2b0f59c60a" containerName="collect-profiles" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.666948 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="46782c64-0d60-4ef2-bb6b-cb2b0f59c60a" containerName="collect-profiles" Dec 02 00:17:28 crc kubenswrapper[4856]: E1202 00:17:28.666959 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="kube-rbac-proxy-node" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.666969 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="kube-rbac-proxy-node" Dec 02 00:17:28 crc kubenswrapper[4856]: E1202 00:17:28.666982 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="sbdb" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.666989 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="sbdb" Dec 02 00:17:28 crc kubenswrapper[4856]: E1202 00:17:28.667009 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="ovnkube-controller" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.667016 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="ovnkube-controller" Dec 02 00:17:28 crc kubenswrapper[4856]: E1202 00:17:28.667025 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="northd" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.667034 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="northd" Dec 02 00:17:28 crc kubenswrapper[4856]: E1202 00:17:28.667043 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="ovnkube-controller" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.667051 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="ovnkube-controller" Dec 02 00:17:28 crc kubenswrapper[4856]: E1202 00:17:28.667059 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="kubecfg-setup" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.667066 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="kubecfg-setup" Dec 02 00:17:28 crc kubenswrapper[4856]: E1202 00:17:28.667074 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="kube-rbac-proxy-ovn-metrics" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.667081 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="kube-rbac-proxy-ovn-metrics" Dec 02 00:17:28 crc kubenswrapper[4856]: E1202 00:17:28.667088 4856 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="ovnkube-controller" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.667096 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="ovnkube-controller" Dec 02 00:17:28 crc kubenswrapper[4856]: E1202 00:17:28.667104 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="ovnkube-controller" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.667110 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="ovnkube-controller" Dec 02 00:17:28 crc kubenswrapper[4856]: E1202 00:17:28.667119 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="ovnkube-controller" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.667126 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="ovnkube-controller" Dec 02 00:17:28 crc kubenswrapper[4856]: E1202 00:17:28.667140 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="ovn-controller" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.667146 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="ovn-controller" Dec 02 00:17:28 crc kubenswrapper[4856]: E1202 00:17:28.667156 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="nbdb" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.667162 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="nbdb" Dec 02 00:17:28 crc kubenswrapper[4856]: E1202 00:17:28.667171 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="ovn-acl-logging" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.667179 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="ovn-acl-logging" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.667281 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="ovnkube-controller" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.667294 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="ovn-acl-logging" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.667307 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="sbdb" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.667319 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="kube-rbac-proxy-node" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.667329 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="ovnkube-controller" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.667337 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="ovnkube-controller" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.667347 4856 
memory_manager.go:354] "RemoveStaleState removing state" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="ovnkube-controller" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.667357 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="ovn-controller" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.667367 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="nbdb" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.667376 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="kube-rbac-proxy-ovn-metrics" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.667386 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="46782c64-0d60-4ef2-bb6b-cb2b0f59c60a" containerName="collect-profiles" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.667394 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="northd" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.667583 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="3051381c-49c8-4217-9831-013ca2931604" containerName="ovnkube-controller" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.669623 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.682461 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "3051381c-49c8-4217-9831-013ca2931604" (UID: "3051381c-49c8-4217-9831-013ca2931604"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.749168 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-log-socket\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.749240 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-host-run-ovn-kubernetes\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.749260 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-var-lib-openvswitch\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.749331 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-systemd-units\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.749359 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-run-ovn\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.749374 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-host-cni-bin\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.749391 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-host-kubelet\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.749407 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qstr\" (UniqueName: \"kubernetes.io/projected/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-kube-api-access-8qstr\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.749502 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.749555 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-ovnkube-script-lib\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.749632 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-ovnkube-config\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.749676 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-node-log\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.749701 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-env-overrides\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.749747 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-host-cni-netd\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.749806 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-etc-openvswitch\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.749838 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-run-openvswitch\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.749910 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-run-systemd\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.749977 4856 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-ovn-node-metrics-cert\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.749995 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-host-run-netns\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.750011 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-host-slash\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.750077 4856 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3051381c-49c8-4217-9831-013ca2931604-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.750114 4856 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3051381c-49c8-4217-9831-013ca2931604-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.750124 4856 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3051381c-49c8-4217-9831-013ca2931604-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.750137 4856 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3051381c-49c8-4217-9831-013ca2931604-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.750147 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2g5ht\" (UniqueName: \"kubernetes.io/projected/3051381c-49c8-4217-9831-013ca2931604-kube-api-access-2g5ht\") on node \"crc\" DevicePath \"\"" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.750156 4856 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3051381c-49c8-4217-9831-013ca2931604-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.851027 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-node-log\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.851078 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-env-overrides\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 
00:17:28.851106 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-host-cni-netd\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.851136 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-etc-openvswitch\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.851158 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-run-openvswitch\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.851186 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-run-systemd\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.851212 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-ovn-node-metrics-cert\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.851212 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-node-log\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.851276 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-host-run-netns\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.851235 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-host-run-netns\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.851328 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-host-slash\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.851352 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: 
\"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-log-socket\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.851382 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-host-run-ovn-kubernetes\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.851403 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-var-lib-openvswitch\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.851438 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-systemd-units\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.851472 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-run-ovn\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.851491 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-host-cni-bin\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.851518 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-host-kubelet\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.851542 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qstr\" (UniqueName: \"kubernetes.io/projected/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-kube-api-access-8qstr\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.851566 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.851610 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-ovnkube-script-lib\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.851635 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-ovnkube-config\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.851995 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-env-overrides\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.852061 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-host-cni-netd\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.852096 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-var-lib-openvswitch\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.852125 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-run-openvswitch\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.852155 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-run-systemd\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.852292 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-host-slash\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.852400 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-run-ovn\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.852403 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-systemd-units\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.851302 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-etc-openvswitch\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.852363 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-ovnkube-config\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.852481 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-host-cni-bin\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.852514 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-host-kubelet\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.852723 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-log-socket\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.852803 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-host-run-ovn-kubernetes\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.852769 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.853483 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-ovnkube-script-lib\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.856395 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-ovn-node-metrics-cert\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: 
I1202 00:17:28.879347 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qstr\" (UniqueName: \"kubernetes.io/projected/1bef9dd4-d909-4c4e-9ded-6da5d4ecc419-kube-api-access-8qstr\") pod \"ovnkube-node-45h42\" (UID: \"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419\") " pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.910216 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l5jg6_3051381c-49c8-4217-9831-013ca2931604/ovnkube-controller/3.log" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.913336 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l5jg6_3051381c-49c8-4217-9831-013ca2931604/ovn-acl-logging/0.log" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914047 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l5jg6_3051381c-49c8-4217-9831-013ca2931604/ovn-controller/0.log" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914657 4856 generic.go:334] "Generic (PLEG): container finished" podID="3051381c-49c8-4217-9831-013ca2931604" containerID="67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd" exitCode=0 Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914679 4856 generic.go:334] "Generic (PLEG): container finished" podID="3051381c-49c8-4217-9831-013ca2931604" containerID="f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683" exitCode=0 Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914688 4856 generic.go:334] "Generic (PLEG): container finished" podID="3051381c-49c8-4217-9831-013ca2931604" containerID="38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631" exitCode=0 Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914698 4856 generic.go:334] "Generic (PLEG): container finished" podID="3051381c-49c8-4217-9831-013ca2931604" containerID="acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093" exitCode=0 Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914705 4856 generic.go:334] "Generic (PLEG): container finished" podID="3051381c-49c8-4217-9831-013ca2931604" containerID="2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67" exitCode=0 Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914710 4856 generic.go:334] "Generic (PLEG): container finished" podID="3051381c-49c8-4217-9831-013ca2931604" containerID="ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b" exitCode=0 Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914716 4856 generic.go:334] "Generic (PLEG): container finished" podID="3051381c-49c8-4217-9831-013ca2931604" containerID="31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476" exitCode=143 Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914722 4856 generic.go:334] "Generic (PLEG): container finished" podID="3051381c-49c8-4217-9831-013ca2931604" containerID="5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e" exitCode=143 Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914770 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerDied","Data":"67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914805 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerDied","Data":"f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914780 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914816 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerDied","Data":"38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914830 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerDied","Data":"acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914839 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerDied","Data":"2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914848 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerDied","Data":"ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914857 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914867 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914872 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914878 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914884 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914891 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914897 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914903 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914912 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914921 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerDied","Data":"31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914930 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914937 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914944 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914950 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914956 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914964 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914969 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914974 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914979 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914984 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914991 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerDied","Data":"5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914999 4856 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.915005 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.915010 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.915015 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.915021 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.915026 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.915031 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.915038 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.915042 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.915047 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.915055 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l5jg6" event={"ID":"3051381c-49c8-4217-9831-013ca2931604","Type":"ContainerDied","Data":"df203f6a4c9ca83756f8b45f6facc8ebf3e7a21853cc51d6d4de07eb99a911ed"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.915061 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.915067 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.915074 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.915080 4856 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.915085 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.915090 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.915095 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.915100 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.915105 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.915109 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.914948 4856 scope.go:117] "RemoveContainer" containerID="67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.918373 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5mfwj_536def47-c9d3-4c3e-9b4a-3776e034998b/kube-multus/2.log" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.919318 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5mfwj_536def47-c9d3-4c3e-9b4a-3776e034998b/kube-multus/1.log" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.919356 4856 generic.go:334] "Generic (PLEG): container finished" podID="536def47-c9d3-4c3e-9b4a-3776e034998b" containerID="4fea79bd23bfafff699e40d2d9488ef0f7f8a3f02010dce530663591daa504a7" exitCode=2 Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.919376 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5mfwj" event={"ID":"536def47-c9d3-4c3e-9b4a-3776e034998b","Type":"ContainerDied","Data":"4fea79bd23bfafff699e40d2d9488ef0f7f8a3f02010dce530663591daa504a7"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.919405 4856 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7082ffe076a02439194564edcc80e68738d58df0d78fd91902ca555947068503"} Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.921022 4856 scope.go:117] "RemoveContainer" containerID="4fea79bd23bfafff699e40d2d9488ef0f7f8a3f02010dce530663591daa504a7" Dec 02 00:17:28 crc kubenswrapper[4856]: E1202 00:17:28.921222 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus 
pod=multus-5mfwj_openshift-multus(536def47-c9d3-4c3e-9b4a-3776e034998b)\"" pod="openshift-multus/multus-5mfwj" podUID="536def47-c9d3-4c3e-9b4a-3776e034998b" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.943261 4856 scope.go:117] "RemoveContainer" containerID="35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.969352 4856 scope.go:117] "RemoveContainer" containerID="f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.969521 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-l5jg6"] Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.982188 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-l5jg6"] Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.984406 4856 scope.go:117] "RemoveContainer" containerID="38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631" Dec 02 00:17:28 crc kubenswrapper[4856]: I1202 00:17:28.987660 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.002716 4856 scope.go:117] "RemoveContainer" containerID="acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.024208 4856 scope.go:117] "RemoveContainer" containerID="2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.042246 4856 scope.go:117] "RemoveContainer" containerID="ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.056357 4856 scope.go:117] "RemoveContainer" containerID="31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.073418 4856 scope.go:117] "RemoveContainer" containerID="5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.104191 4856 scope.go:117] "RemoveContainer" containerID="245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.119314 4856 scope.go:117] "RemoveContainer" containerID="67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd" Dec 02 00:17:29 crc kubenswrapper[4856]: E1202 00:17:29.119913 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd\": container with ID starting with 67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd not found: ID does not exist" containerID="67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.119960 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd"} err="failed to get container status \"67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd\": rpc error: code = NotFound desc = could not find container \"67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd\": container with ID starting with 67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd not found: ID does not exist" Dec 02 00:17:29 crc 
kubenswrapper[4856]: I1202 00:17:29.119992 4856 scope.go:117] "RemoveContainer" containerID="35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff" Dec 02 00:17:29 crc kubenswrapper[4856]: E1202 00:17:29.120360 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff\": container with ID starting with 35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff not found: ID does not exist" containerID="35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.120406 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff"} err="failed to get container status \"35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff\": rpc error: code = NotFound desc = could not find container \"35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff\": container with ID starting with 35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.120436 4856 scope.go:117] "RemoveContainer" containerID="f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683" Dec 02 00:17:29 crc kubenswrapper[4856]: E1202 00:17:29.120857 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\": container with ID starting with f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683 not found: ID does not exist" containerID="f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.120880 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683"} err="failed to get container status \"f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\": rpc error: code = NotFound desc = could not find container \"f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\": container with ID starting with f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683 not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.120894 4856 scope.go:117] "RemoveContainer" containerID="38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631" Dec 02 00:17:29 crc kubenswrapper[4856]: E1202 00:17:29.121194 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\": container with ID starting with 38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631 not found: ID does not exist" containerID="38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.121269 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631"} err="failed to get container status \"38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\": rpc error: code = NotFound desc = could not find container 
\"38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\": container with ID starting with 38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631 not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.121334 4856 scope.go:117] "RemoveContainer" containerID="acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093" Dec 02 00:17:29 crc kubenswrapper[4856]: E1202 00:17:29.121821 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\": container with ID starting with acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093 not found: ID does not exist" containerID="acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.121846 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093"} err="failed to get container status \"acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\": rpc error: code = NotFound desc = could not find container \"acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\": container with ID starting with acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093 not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.121861 4856 scope.go:117] "RemoveContainer" containerID="2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67" Dec 02 00:17:29 crc kubenswrapper[4856]: E1202 00:17:29.122103 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\": container with ID starting with 2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67 not found: ID does not exist" containerID="2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.122180 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67"} err="failed to get container status \"2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\": rpc error: code = NotFound desc = could not find container \"2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\": container with ID starting with 2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67 not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.122238 4856 scope.go:117] "RemoveContainer" containerID="ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b" Dec 02 00:17:29 crc kubenswrapper[4856]: E1202 00:17:29.122552 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\": container with ID starting with ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b not found: ID does not exist" containerID="ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.122573 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b"} 
err="failed to get container status \"ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\": rpc error: code = NotFound desc = could not find container \"ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\": container with ID starting with ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.122611 4856 scope.go:117] "RemoveContainer" containerID="31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476" Dec 02 00:17:29 crc kubenswrapper[4856]: E1202 00:17:29.122880 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\": container with ID starting with 31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476 not found: ID does not exist" containerID="31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.122906 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476"} err="failed to get container status \"31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\": rpc error: code = NotFound desc = could not find container \"31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\": container with ID starting with 31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476 not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.122919 4856 scope.go:117] "RemoveContainer" containerID="5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e" Dec 02 00:17:29 crc kubenswrapper[4856]: E1202 00:17:29.123272 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\": container with ID starting with 5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e not found: ID does not exist" containerID="5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.123295 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e"} err="failed to get container status \"5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\": rpc error: code = NotFound desc = could not find container \"5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\": container with ID starting with 5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.123314 4856 scope.go:117] "RemoveContainer" containerID="245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45" Dec 02 00:17:29 crc kubenswrapper[4856]: E1202 00:17:29.123581 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\": container with ID starting with 245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45 not found: ID does not exist" containerID="245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.123632 4856 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45"} err="failed to get container status \"245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\": rpc error: code = NotFound desc = could not find container \"245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\": container with ID starting with 245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45 not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.123649 4856 scope.go:117] "RemoveContainer" containerID="67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.123963 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd"} err="failed to get container status \"67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd\": rpc error: code = NotFound desc = could not find container \"67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd\": container with ID starting with 67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.123990 4856 scope.go:117] "RemoveContainer" containerID="35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.124315 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff"} err="failed to get container status \"35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff\": rpc error: code = NotFound desc = could not find container \"35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff\": container with ID starting with 35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.124334 4856 scope.go:117] "RemoveContainer" containerID="f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.124743 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683"} err="failed to get container status \"f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\": rpc error: code = NotFound desc = could not find container \"f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\": container with ID starting with f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683 not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.124764 4856 scope.go:117] "RemoveContainer" containerID="38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.125132 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631"} err="failed to get container status \"38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\": rpc error: code = NotFound desc = could not find container \"38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\": container with ID starting with 
38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631 not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.125152 4856 scope.go:117] "RemoveContainer" containerID="acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.125426 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093"} err="failed to get container status \"acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\": rpc error: code = NotFound desc = could not find container \"acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\": container with ID starting with acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093 not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.125451 4856 scope.go:117] "RemoveContainer" containerID="2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.125786 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67"} err="failed to get container status \"2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\": rpc error: code = NotFound desc = could not find container \"2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\": container with ID starting with 2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67 not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.125809 4856 scope.go:117] "RemoveContainer" containerID="ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.126081 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b"} err="failed to get container status \"ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\": rpc error: code = NotFound desc = could not find container \"ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\": container with ID starting with ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.126103 4856 scope.go:117] "RemoveContainer" containerID="31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.126350 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476"} err="failed to get container status \"31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\": rpc error: code = NotFound desc = could not find container \"31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\": container with ID starting with 31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476 not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.126375 4856 scope.go:117] "RemoveContainer" containerID="5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.126713 4856 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e"} err="failed to get container status \"5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\": rpc error: code = NotFound desc = could not find container \"5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\": container with ID starting with 5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.126734 4856 scope.go:117] "RemoveContainer" containerID="245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.127027 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45"} err="failed to get container status \"245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\": rpc error: code = NotFound desc = could not find container \"245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\": container with ID starting with 245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45 not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.127120 4856 scope.go:117] "RemoveContainer" containerID="67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.127563 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd"} err="failed to get container status \"67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd\": rpc error: code = NotFound desc = could not find container \"67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd\": container with ID starting with 67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.127628 4856 scope.go:117] "RemoveContainer" containerID="35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.128906 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff"} err="failed to get container status \"35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff\": rpc error: code = NotFound desc = could not find container \"35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff\": container with ID starting with 35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.128987 4856 scope.go:117] "RemoveContainer" containerID="f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.129284 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683"} err="failed to get container status \"f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\": rpc error: code = NotFound desc = could not find container \"f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\": container with ID starting with f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683 not found: ID does not exist" Dec 
02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.129308 4856 scope.go:117] "RemoveContainer" containerID="38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.129575 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631"} err="failed to get container status \"38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\": rpc error: code = NotFound desc = could not find container \"38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\": container with ID starting with 38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631 not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.129607 4856 scope.go:117] "RemoveContainer" containerID="acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.129875 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093"} err="failed to get container status \"acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\": rpc error: code = NotFound desc = could not find container \"acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\": container with ID starting with acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093 not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.129890 4856 scope.go:117] "RemoveContainer" containerID="2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.130092 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67"} err="failed to get container status \"2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\": rpc error: code = NotFound desc = could not find container \"2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\": container with ID starting with 2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67 not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.130109 4856 scope.go:117] "RemoveContainer" containerID="ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.130450 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b"} err="failed to get container status \"ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\": rpc error: code = NotFound desc = could not find container \"ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\": container with ID starting with ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.130540 4856 scope.go:117] "RemoveContainer" containerID="31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.130852 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476"} err="failed to get container status 
\"31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\": rpc error: code = NotFound desc = could not find container \"31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\": container with ID starting with 31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476 not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.130875 4856 scope.go:117] "RemoveContainer" containerID="5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.131105 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e"} err="failed to get container status \"5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\": rpc error: code = NotFound desc = could not find container \"5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\": container with ID starting with 5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.131126 4856 scope.go:117] "RemoveContainer" containerID="245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.131328 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45"} err="failed to get container status \"245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\": rpc error: code = NotFound desc = could not find container \"245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\": container with ID starting with 245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45 not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.131343 4856 scope.go:117] "RemoveContainer" containerID="67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.131582 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd"} err="failed to get container status \"67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd\": rpc error: code = NotFound desc = could not find container \"67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd\": container with ID starting with 67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.131621 4856 scope.go:117] "RemoveContainer" containerID="35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.131852 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff"} err="failed to get container status \"35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff\": rpc error: code = NotFound desc = could not find container \"35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff\": container with ID starting with 35303d6b1fbcb3fbf5a6bf459503e7e4817dacbf4697dc98a09ac59a48201fff not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.131871 4856 scope.go:117] "RemoveContainer" 
containerID="f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.132106 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683"} err="failed to get container status \"f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\": rpc error: code = NotFound desc = could not find container \"f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683\": container with ID starting with f520635cbcb56d7accd262f60f9453291cce9e9594610f591ceb6574edd1d683 not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.132135 4856 scope.go:117] "RemoveContainer" containerID="38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.132362 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631"} err="failed to get container status \"38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\": rpc error: code = NotFound desc = could not find container \"38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631\": container with ID starting with 38ba956bdd9a06d9ce50e2655857d184a786fb05b2486b1be8f5c5fe6d4f3631 not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.132382 4856 scope.go:117] "RemoveContainer" containerID="acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.132553 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093"} err="failed to get container status \"acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\": rpc error: code = NotFound desc = could not find container \"acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093\": container with ID starting with acb091f24ba3ad5e4e4bcc127faea8b0d0717688ec9a2d3c9150ccc4a8531093 not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.132576 4856 scope.go:117] "RemoveContainer" containerID="2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.133068 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67"} err="failed to get container status \"2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\": rpc error: code = NotFound desc = could not find container \"2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67\": container with ID starting with 2ac6c070aa5cc29f62a41053512f7929f4f1277d68a16065fcdceb1a295e6a67 not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.133089 4856 scope.go:117] "RemoveContainer" containerID="ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.133294 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b"} err="failed to get container status \"ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\": rpc error: code = NotFound desc = could not find 
container \"ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b\": container with ID starting with ef93bde3ca47688fe695381b5817dddd0de41ec09eb92e371531059f1ddcdf1b not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.133314 4856 scope.go:117] "RemoveContainer" containerID="31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.133527 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476"} err="failed to get container status \"31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\": rpc error: code = NotFound desc = could not find container \"31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476\": container with ID starting with 31278f726a0bfbbd3fe76e066751b06cb1915831c9303266da15807cb69bb476 not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.133545 4856 scope.go:117] "RemoveContainer" containerID="5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.133800 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e"} err="failed to get container status \"5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\": rpc error: code = NotFound desc = could not find container \"5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e\": container with ID starting with 5d1d1d36ea843dce6a3e39cfe03f7185a5ac9c99883144ca4bd1fc9f8800288e not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.133814 4856 scope.go:117] "RemoveContainer" containerID="245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.134066 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45"} err="failed to get container status \"245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\": rpc error: code = NotFound desc = could not find container \"245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45\": container with ID starting with 245d30fd70073274388e5c39105fc2c3009104ed939878c3bc069d88170d6d45 not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.134099 4856 scope.go:117] "RemoveContainer" containerID="67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.134353 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd"} err="failed to get container status \"67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd\": rpc error: code = NotFound desc = could not find container \"67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd\": container with ID starting with 67f3e7c786cd3a42f9ce5ceafc4c0801be99fa61762c7638e8a18a37fe086cbd not found: ID does not exist" Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.262503 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3051381c-49c8-4217-9831-013ca2931604" path="/var/lib/kubelet/pods/3051381c-49c8-4217-9831-013ca2931604/volumes" Dec 02 
00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.932964 4856 generic.go:334] "Generic (PLEG): container finished" podID="1bef9dd4-d909-4c4e-9ded-6da5d4ecc419" containerID="3dbc837ce9c5dfac3dc2230a54c243322e80fb75a7c14314e19cbec1caa751b8" exitCode=0 Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.933036 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-45h42" event={"ID":"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419","Type":"ContainerDied","Data":"3dbc837ce9c5dfac3dc2230a54c243322e80fb75a7c14314e19cbec1caa751b8"} Dec 02 00:17:29 crc kubenswrapper[4856]: I1202 00:17:29.933081 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-45h42" event={"ID":"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419","Type":"ContainerStarted","Data":"a222aeb8cd97c2d6b318330cad06d3e1066dd1af263a7d6814c307813521b298"} Dec 02 00:17:30 crc kubenswrapper[4856]: I1202 00:17:30.942457 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-45h42" event={"ID":"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419","Type":"ContainerStarted","Data":"b33b9b0441d81b881b596ce90d26a29d37789d88adde70d034d764403e6fa852"} Dec 02 00:17:30 crc kubenswrapper[4856]: I1202 00:17:30.942794 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-45h42" event={"ID":"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419","Type":"ContainerStarted","Data":"56a485c22fd9ef557a053d24d34188ab6b9afd0b72cee996cb9c673a02482352"} Dec 02 00:17:30 crc kubenswrapper[4856]: I1202 00:17:30.942807 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-45h42" event={"ID":"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419","Type":"ContainerStarted","Data":"efcad77b2b3365c65118995dcec2b460b611069a7c24c3d1def724daaba9b828"} Dec 02 00:17:30 crc kubenswrapper[4856]: I1202 00:17:30.942817 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-45h42" event={"ID":"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419","Type":"ContainerStarted","Data":"dd32ea4236097820808fb6501f75ace60061584b6b4c6fb45e024f33a38e5cdd"} Dec 02 00:17:30 crc kubenswrapper[4856]: I1202 00:17:30.942826 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-45h42" event={"ID":"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419","Type":"ContainerStarted","Data":"f1c15a24bbb341b0a6175447c964778e63ef06da4a2bfaca3bc2d97f78c6a7d1"} Dec 02 00:17:30 crc kubenswrapper[4856]: I1202 00:17:30.942834 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-45h42" event={"ID":"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419","Type":"ContainerStarted","Data":"91c75c94212703a3c09fc96609ab8a7e11cff5f9439f94e0786a1c3ce36115d1"} Dec 02 00:17:33 crc kubenswrapper[4856]: I1202 00:17:33.970573 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-45h42" event={"ID":"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419","Type":"ContainerStarted","Data":"926367c9ed5fee91c5840cabda8f006ac292af13665703df945a8d6ba85b62d6"} Dec 02 00:17:35 crc kubenswrapper[4856]: I1202 00:17:35.983144 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-45h42" event={"ID":"1bef9dd4-d909-4c4e-9ded-6da5d4ecc419","Type":"ContainerStarted","Data":"950f0d265100277e9877051d1a15c51c1a7babdb418b337fe67f57c54541a866"} Dec 02 00:17:35 crc kubenswrapper[4856]: I1202 00:17:35.983759 4856 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:35 crc kubenswrapper[4856]: I1202 00:17:35.983775 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:35 crc kubenswrapper[4856]: I1202 00:17:35.983786 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:36 crc kubenswrapper[4856]: I1202 00:17:36.008276 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:36 crc kubenswrapper[4856]: I1202 00:17:36.009614 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:17:36 crc kubenswrapper[4856]: I1202 00:17:36.020000 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-45h42" podStartSLOduration=8.019985348 podStartE2EDuration="8.019985348s" podCreationTimestamp="2025-12-02 00:17:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:17:36.017049906 +0000 UTC m=+683.043417910" watchObservedRunningTime="2025-12-02 00:17:36.019985348 +0000 UTC m=+683.046353352" Dec 02 00:17:40 crc kubenswrapper[4856]: I1202 00:17:40.253654 4856 scope.go:117] "RemoveContainer" containerID="4fea79bd23bfafff699e40d2d9488ef0f7f8a3f02010dce530663591daa504a7" Dec 02 00:17:40 crc kubenswrapper[4856]: E1202 00:17:40.254336 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-5mfwj_openshift-multus(536def47-c9d3-4c3e-9b4a-3776e034998b)\"" pod="openshift-multus/multus-5mfwj" podUID="536def47-c9d3-4c3e-9b4a-3776e034998b" Dec 02 00:17:54 crc kubenswrapper[4856]: I1202 00:17:54.252745 4856 scope.go:117] "RemoveContainer" containerID="4fea79bd23bfafff699e40d2d9488ef0f7f8a3f02010dce530663591daa504a7" Dec 02 00:17:55 crc kubenswrapper[4856]: I1202 00:17:55.099542 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5mfwj_536def47-c9d3-4c3e-9b4a-3776e034998b/kube-multus/2.log" Dec 02 00:17:55 crc kubenswrapper[4856]: I1202 00:17:55.100815 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5mfwj_536def47-c9d3-4c3e-9b4a-3776e034998b/kube-multus/1.log" Dec 02 00:17:55 crc kubenswrapper[4856]: I1202 00:17:55.101054 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5mfwj" event={"ID":"536def47-c9d3-4c3e-9b4a-3776e034998b","Type":"ContainerStarted","Data":"78b51d88f1b67714523932d33bacf0a8c1129dde82e300e0d5710a7cda08f4db"} Dec 02 00:17:59 crc kubenswrapper[4856]: I1202 00:17:59.009747 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-45h42" Dec 02 00:18:13 crc kubenswrapper[4856]: I1202 00:18:13.673280 4856 scope.go:117] "RemoveContainer" containerID="7082ffe076a02439194564edcc80e68738d58df0d78fd91902ca555947068503" Dec 02 00:18:14 crc kubenswrapper[4856]: I1202 00:18:14.237091 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5mfwj_536def47-c9d3-4c3e-9b4a-3776e034998b/kube-multus/2.log" Dec 02 00:18:35 crc kubenswrapper[4856]: I1202 00:18:35.062080 4856 patch_prober.go:28] 
interesting pod/machine-config-daemon-455ww container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 00:18:35 crc kubenswrapper[4856]: I1202 00:18:35.062578 4856 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 00:18:36 crc kubenswrapper[4856]: I1202 00:18:36.718906 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sdbs6"] Dec 02 00:18:36 crc kubenswrapper[4856]: I1202 00:18:36.720148 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-sdbs6" podUID="f62c12f8-4128-49e1-b40e-186775fff67c" containerName="registry-server" containerID="cri-o://aa36f31d284ce2cf4f1d07c474daf2996e7862fd874e7aafc9a291498dc8bb4b" gracePeriod=30 Dec 02 00:18:37 crc kubenswrapper[4856]: I1202 00:18:37.109629 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sdbs6" Dec 02 00:18:37 crc kubenswrapper[4856]: I1202 00:18:37.287909 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xk9sc\" (UniqueName: \"kubernetes.io/projected/f62c12f8-4128-49e1-b40e-186775fff67c-kube-api-access-xk9sc\") pod \"f62c12f8-4128-49e1-b40e-186775fff67c\" (UID: \"f62c12f8-4128-49e1-b40e-186775fff67c\") " Dec 02 00:18:37 crc kubenswrapper[4856]: I1202 00:18:37.288044 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f62c12f8-4128-49e1-b40e-186775fff67c-utilities\") pod \"f62c12f8-4128-49e1-b40e-186775fff67c\" (UID: \"f62c12f8-4128-49e1-b40e-186775fff67c\") " Dec 02 00:18:37 crc kubenswrapper[4856]: I1202 00:18:37.288307 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f62c12f8-4128-49e1-b40e-186775fff67c-catalog-content\") pod \"f62c12f8-4128-49e1-b40e-186775fff67c\" (UID: \"f62c12f8-4128-49e1-b40e-186775fff67c\") " Dec 02 00:18:37 crc kubenswrapper[4856]: I1202 00:18:37.288886 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f62c12f8-4128-49e1-b40e-186775fff67c-utilities" (OuterVolumeSpecName: "utilities") pod "f62c12f8-4128-49e1-b40e-186775fff67c" (UID: "f62c12f8-4128-49e1-b40e-186775fff67c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:18:37 crc kubenswrapper[4856]: I1202 00:18:37.293676 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f62c12f8-4128-49e1-b40e-186775fff67c-kube-api-access-xk9sc" (OuterVolumeSpecName: "kube-api-access-xk9sc") pod "f62c12f8-4128-49e1-b40e-186775fff67c" (UID: "f62c12f8-4128-49e1-b40e-186775fff67c"). InnerVolumeSpecName "kube-api-access-xk9sc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:18:37 crc kubenswrapper[4856]: I1202 00:18:37.311994 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f62c12f8-4128-49e1-b40e-186775fff67c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f62c12f8-4128-49e1-b40e-186775fff67c" (UID: "f62c12f8-4128-49e1-b40e-186775fff67c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:18:37 crc kubenswrapper[4856]: I1202 00:18:37.374018 4856 generic.go:334] "Generic (PLEG): container finished" podID="f62c12f8-4128-49e1-b40e-186775fff67c" containerID="aa36f31d284ce2cf4f1d07c474daf2996e7862fd874e7aafc9a291498dc8bb4b" exitCode=0 Dec 02 00:18:37 crc kubenswrapper[4856]: I1202 00:18:37.374063 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sdbs6" event={"ID":"f62c12f8-4128-49e1-b40e-186775fff67c","Type":"ContainerDied","Data":"aa36f31d284ce2cf4f1d07c474daf2996e7862fd874e7aafc9a291498dc8bb4b"} Dec 02 00:18:37 crc kubenswrapper[4856]: I1202 00:18:37.374092 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sdbs6" event={"ID":"f62c12f8-4128-49e1-b40e-186775fff67c","Type":"ContainerDied","Data":"ba19ebf83a5d63ca0568772ce017e81651d9e15d896f2238698cd6ca8a0996c7"} Dec 02 00:18:37 crc kubenswrapper[4856]: I1202 00:18:37.374112 4856 scope.go:117] "RemoveContainer" containerID="aa36f31d284ce2cf4f1d07c474daf2996e7862fd874e7aafc9a291498dc8bb4b" Dec 02 00:18:37 crc kubenswrapper[4856]: I1202 00:18:37.374126 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sdbs6" Dec 02 00:18:37 crc kubenswrapper[4856]: I1202 00:18:37.389243 4856 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f62c12f8-4128-49e1-b40e-186775fff67c-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 00:18:37 crc kubenswrapper[4856]: I1202 00:18:37.389277 4856 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f62c12f8-4128-49e1-b40e-186775fff67c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 00:18:37 crc kubenswrapper[4856]: I1202 00:18:37.389290 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xk9sc\" (UniqueName: \"kubernetes.io/projected/f62c12f8-4128-49e1-b40e-186775fff67c-kube-api-access-xk9sc\") on node \"crc\" DevicePath \"\"" Dec 02 00:18:37 crc kubenswrapper[4856]: I1202 00:18:37.402001 4856 scope.go:117] "RemoveContainer" containerID="1b941d539570fb4818b2908a9c33b51444a3a63c22513dcab3a5741e18978255" Dec 02 00:18:37 crc kubenswrapper[4856]: I1202 00:18:37.407000 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sdbs6"] Dec 02 00:18:37 crc kubenswrapper[4856]: I1202 00:18:37.409086 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-sdbs6"] Dec 02 00:18:37 crc kubenswrapper[4856]: I1202 00:18:37.432190 4856 scope.go:117] "RemoveContainer" containerID="169b58bd9208964da2d9780d5e277504dfdafb73d6c667a3e7a78fe1afa8e539" Dec 02 00:18:37 crc kubenswrapper[4856]: I1202 00:18:37.444292 4856 scope.go:117] "RemoveContainer" containerID="aa36f31d284ce2cf4f1d07c474daf2996e7862fd874e7aafc9a291498dc8bb4b" Dec 02 00:18:37 crc kubenswrapper[4856]: E1202 00:18:37.444563 4856 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa36f31d284ce2cf4f1d07c474daf2996e7862fd874e7aafc9a291498dc8bb4b\": container with ID starting with aa36f31d284ce2cf4f1d07c474daf2996e7862fd874e7aafc9a291498dc8bb4b not found: ID does not exist" containerID="aa36f31d284ce2cf4f1d07c474daf2996e7862fd874e7aafc9a291498dc8bb4b" Dec 02 00:18:37 crc kubenswrapper[4856]: I1202 00:18:37.444610 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa36f31d284ce2cf4f1d07c474daf2996e7862fd874e7aafc9a291498dc8bb4b"} err="failed to get container status \"aa36f31d284ce2cf4f1d07c474daf2996e7862fd874e7aafc9a291498dc8bb4b\": rpc error: code = NotFound desc = could not find container \"aa36f31d284ce2cf4f1d07c474daf2996e7862fd874e7aafc9a291498dc8bb4b\": container with ID starting with aa36f31d284ce2cf4f1d07c474daf2996e7862fd874e7aafc9a291498dc8bb4b not found: ID does not exist" Dec 02 00:18:37 crc kubenswrapper[4856]: I1202 00:18:37.444634 4856 scope.go:117] "RemoveContainer" containerID="1b941d539570fb4818b2908a9c33b51444a3a63c22513dcab3a5741e18978255" Dec 02 00:18:37 crc kubenswrapper[4856]: E1202 00:18:37.444901 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b941d539570fb4818b2908a9c33b51444a3a63c22513dcab3a5741e18978255\": container with ID starting with 1b941d539570fb4818b2908a9c33b51444a3a63c22513dcab3a5741e18978255 not found: ID does not exist" containerID="1b941d539570fb4818b2908a9c33b51444a3a63c22513dcab3a5741e18978255" Dec 02 00:18:37 crc kubenswrapper[4856]: I1202 00:18:37.444938 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b941d539570fb4818b2908a9c33b51444a3a63c22513dcab3a5741e18978255"} err="failed to get container status \"1b941d539570fb4818b2908a9c33b51444a3a63c22513dcab3a5741e18978255\": rpc error: code = NotFound desc = could not find container \"1b941d539570fb4818b2908a9c33b51444a3a63c22513dcab3a5741e18978255\": container with ID starting with 1b941d539570fb4818b2908a9c33b51444a3a63c22513dcab3a5741e18978255 not found: ID does not exist" Dec 02 00:18:37 crc kubenswrapper[4856]: I1202 00:18:37.444963 4856 scope.go:117] "RemoveContainer" containerID="169b58bd9208964da2d9780d5e277504dfdafb73d6c667a3e7a78fe1afa8e539" Dec 02 00:18:37 crc kubenswrapper[4856]: E1202 00:18:37.445194 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"169b58bd9208964da2d9780d5e277504dfdafb73d6c667a3e7a78fe1afa8e539\": container with ID starting with 169b58bd9208964da2d9780d5e277504dfdafb73d6c667a3e7a78fe1afa8e539 not found: ID does not exist" containerID="169b58bd9208964da2d9780d5e277504dfdafb73d6c667a3e7a78fe1afa8e539" Dec 02 00:18:37 crc kubenswrapper[4856]: I1202 00:18:37.445225 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"169b58bd9208964da2d9780d5e277504dfdafb73d6c667a3e7a78fe1afa8e539"} err="failed to get container status \"169b58bd9208964da2d9780d5e277504dfdafb73d6c667a3e7a78fe1afa8e539\": rpc error: code = NotFound desc = could not find container \"169b58bd9208964da2d9780d5e277504dfdafb73d6c667a3e7a78fe1afa8e539\": container with ID starting with 169b58bd9208964da2d9780d5e277504dfdafb73d6c667a3e7a78fe1afa8e539 not found: ID does not exist" Dec 02 00:18:39 crc kubenswrapper[4856]: I1202 00:18:39.258006 4856 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="f62c12f8-4128-49e1-b40e-186775fff67c" path="/var/lib/kubelet/pods/f62c12f8-4128-49e1-b40e-186775fff67c/volumes" Dec 02 00:18:40 crc kubenswrapper[4856]: I1202 00:18:40.496087 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm"] Dec 02 00:18:40 crc kubenswrapper[4856]: E1202 00:18:40.496774 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f62c12f8-4128-49e1-b40e-186775fff67c" containerName="extract-content" Dec 02 00:18:40 crc kubenswrapper[4856]: I1202 00:18:40.496813 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="f62c12f8-4128-49e1-b40e-186775fff67c" containerName="extract-content" Dec 02 00:18:40 crc kubenswrapper[4856]: E1202 00:18:40.496838 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f62c12f8-4128-49e1-b40e-186775fff67c" containerName="extract-utilities" Dec 02 00:18:40 crc kubenswrapper[4856]: I1202 00:18:40.496847 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="f62c12f8-4128-49e1-b40e-186775fff67c" containerName="extract-utilities" Dec 02 00:18:40 crc kubenswrapper[4856]: E1202 00:18:40.496866 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f62c12f8-4128-49e1-b40e-186775fff67c" containerName="registry-server" Dec 02 00:18:40 crc kubenswrapper[4856]: I1202 00:18:40.496875 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="f62c12f8-4128-49e1-b40e-186775fff67c" containerName="registry-server" Dec 02 00:18:40 crc kubenswrapper[4856]: I1202 00:18:40.497060 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="f62c12f8-4128-49e1-b40e-186775fff67c" containerName="registry-server" Dec 02 00:18:40 crc kubenswrapper[4856]: I1202 00:18:40.498008 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm" Dec 02 00:18:40 crc kubenswrapper[4856]: I1202 00:18:40.500806 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 02 00:18:40 crc kubenswrapper[4856]: I1202 00:18:40.509418 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm"] Dec 02 00:18:40 crc kubenswrapper[4856]: I1202 00:18:40.624585 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1fd9f804-c0c5-4f64-8328-30f1e6b25a98-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm\" (UID: \"1fd9f804-c0c5-4f64-8328-30f1e6b25a98\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm" Dec 02 00:18:40 crc kubenswrapper[4856]: I1202 00:18:40.624697 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mcbk\" (UniqueName: \"kubernetes.io/projected/1fd9f804-c0c5-4f64-8328-30f1e6b25a98-kube-api-access-6mcbk\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm\" (UID: \"1fd9f804-c0c5-4f64-8328-30f1e6b25a98\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm" Dec 02 00:18:40 crc kubenswrapper[4856]: I1202 00:18:40.624806 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1fd9f804-c0c5-4f64-8328-30f1e6b25a98-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm\" (UID: \"1fd9f804-c0c5-4f64-8328-30f1e6b25a98\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm" Dec 02 00:18:40 crc kubenswrapper[4856]: I1202 00:18:40.725654 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1fd9f804-c0c5-4f64-8328-30f1e6b25a98-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm\" (UID: \"1fd9f804-c0c5-4f64-8328-30f1e6b25a98\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm" Dec 02 00:18:40 crc kubenswrapper[4856]: I1202 00:18:40.726005 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1fd9f804-c0c5-4f64-8328-30f1e6b25a98-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm\" (UID: \"1fd9f804-c0c5-4f64-8328-30f1e6b25a98\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm" Dec 02 00:18:40 crc kubenswrapper[4856]: I1202 00:18:40.726158 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mcbk\" (UniqueName: \"kubernetes.io/projected/1fd9f804-c0c5-4f64-8328-30f1e6b25a98-kube-api-access-6mcbk\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm\" (UID: \"1fd9f804-c0c5-4f64-8328-30f1e6b25a98\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm" Dec 02 00:18:40 crc kubenswrapper[4856]: I1202 00:18:40.726198 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/1fd9f804-c0c5-4f64-8328-30f1e6b25a98-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm\" (UID: \"1fd9f804-c0c5-4f64-8328-30f1e6b25a98\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm" Dec 02 00:18:40 crc kubenswrapper[4856]: I1202 00:18:40.726657 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1fd9f804-c0c5-4f64-8328-30f1e6b25a98-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm\" (UID: \"1fd9f804-c0c5-4f64-8328-30f1e6b25a98\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm" Dec 02 00:18:40 crc kubenswrapper[4856]: I1202 00:18:40.743243 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mcbk\" (UniqueName: \"kubernetes.io/projected/1fd9f804-c0c5-4f64-8328-30f1e6b25a98-kube-api-access-6mcbk\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm\" (UID: \"1fd9f804-c0c5-4f64-8328-30f1e6b25a98\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm" Dec 02 00:18:40 crc kubenswrapper[4856]: I1202 00:18:40.833159 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm" Dec 02 00:18:41 crc kubenswrapper[4856]: I1202 00:18:41.264632 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm"] Dec 02 00:18:41 crc kubenswrapper[4856]: I1202 00:18:41.402669 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm" event={"ID":"1fd9f804-c0c5-4f64-8328-30f1e6b25a98","Type":"ContainerStarted","Data":"7f5e3b539c5c6d05f78607ade3f7824d51d855d7eb4f6d031b8261e5307c421e"} Dec 02 00:18:42 crc kubenswrapper[4856]: I1202 00:18:42.418795 4856 generic.go:334] "Generic (PLEG): container finished" podID="1fd9f804-c0c5-4f64-8328-30f1e6b25a98" containerID="aa77cbcc69ec516e0a321be4118eb356fadb9646f5cdc6d4bae199d8078f2558" exitCode=0 Dec 02 00:18:42 crc kubenswrapper[4856]: I1202 00:18:42.418847 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm" event={"ID":"1fd9f804-c0c5-4f64-8328-30f1e6b25a98","Type":"ContainerDied","Data":"aa77cbcc69ec516e0a321be4118eb356fadb9646f5cdc6d4bae199d8078f2558"} Dec 02 00:18:42 crc kubenswrapper[4856]: I1202 00:18:42.420956 4856 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 00:18:44 crc kubenswrapper[4856]: I1202 00:18:44.429713 4856 generic.go:334] "Generic (PLEG): container finished" podID="1fd9f804-c0c5-4f64-8328-30f1e6b25a98" containerID="197f7d9a4dba98e9b726f39ce290ef623ac34dbc4769e5d51285b6e1c56f7c6e" exitCode=0 Dec 02 00:18:44 crc kubenswrapper[4856]: I1202 00:18:44.429819 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm" event={"ID":"1fd9f804-c0c5-4f64-8328-30f1e6b25a98","Type":"ContainerDied","Data":"197f7d9a4dba98e9b726f39ce290ef623ac34dbc4769e5d51285b6e1c56f7c6e"} Dec 02 00:18:45 crc kubenswrapper[4856]: I1202 00:18:45.439549 4856 generic.go:334] "Generic (PLEG): container finished" 
podID="1fd9f804-c0c5-4f64-8328-30f1e6b25a98" containerID="e030d9c09ebb09c039c9789806fbfbaec32263161ed3b98c839640321a672f57" exitCode=0 Dec 02 00:18:45 crc kubenswrapper[4856]: I1202 00:18:45.439703 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm" event={"ID":"1fd9f804-c0c5-4f64-8328-30f1e6b25a98","Type":"ContainerDied","Data":"e030d9c09ebb09c039c9789806fbfbaec32263161ed3b98c839640321a672f57"} Dec 02 00:18:46 crc kubenswrapper[4856]: I1202 00:18:46.692227 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm" Dec 02 00:18:46 crc kubenswrapper[4856]: I1202 00:18:46.699248 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1fd9f804-c0c5-4f64-8328-30f1e6b25a98-util\") pod \"1fd9f804-c0c5-4f64-8328-30f1e6b25a98\" (UID: \"1fd9f804-c0c5-4f64-8328-30f1e6b25a98\") " Dec 02 00:18:46 crc kubenswrapper[4856]: I1202 00:18:46.699337 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6mcbk\" (UniqueName: \"kubernetes.io/projected/1fd9f804-c0c5-4f64-8328-30f1e6b25a98-kube-api-access-6mcbk\") pod \"1fd9f804-c0c5-4f64-8328-30f1e6b25a98\" (UID: \"1fd9f804-c0c5-4f64-8328-30f1e6b25a98\") " Dec 02 00:18:46 crc kubenswrapper[4856]: I1202 00:18:46.699386 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1fd9f804-c0c5-4f64-8328-30f1e6b25a98-bundle\") pod \"1fd9f804-c0c5-4f64-8328-30f1e6b25a98\" (UID: \"1fd9f804-c0c5-4f64-8328-30f1e6b25a98\") " Dec 02 00:18:46 crc kubenswrapper[4856]: I1202 00:18:46.702641 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1fd9f804-c0c5-4f64-8328-30f1e6b25a98-bundle" (OuterVolumeSpecName: "bundle") pod "1fd9f804-c0c5-4f64-8328-30f1e6b25a98" (UID: "1fd9f804-c0c5-4f64-8328-30f1e6b25a98"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:18:46 crc kubenswrapper[4856]: I1202 00:18:46.706120 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fd9f804-c0c5-4f64-8328-30f1e6b25a98-kube-api-access-6mcbk" (OuterVolumeSpecName: "kube-api-access-6mcbk") pod "1fd9f804-c0c5-4f64-8328-30f1e6b25a98" (UID: "1fd9f804-c0c5-4f64-8328-30f1e6b25a98"). InnerVolumeSpecName "kube-api-access-6mcbk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:18:46 crc kubenswrapper[4856]: I1202 00:18:46.713503 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1fd9f804-c0c5-4f64-8328-30f1e6b25a98-util" (OuterVolumeSpecName: "util") pod "1fd9f804-c0c5-4f64-8328-30f1e6b25a98" (UID: "1fd9f804-c0c5-4f64-8328-30f1e6b25a98"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:18:46 crc kubenswrapper[4856]: I1202 00:18:46.800175 4856 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1fd9f804-c0c5-4f64-8328-30f1e6b25a98-util\") on node \"crc\" DevicePath \"\"" Dec 02 00:18:46 crc kubenswrapper[4856]: I1202 00:18:46.800209 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6mcbk\" (UniqueName: \"kubernetes.io/projected/1fd9f804-c0c5-4f64-8328-30f1e6b25a98-kube-api-access-6mcbk\") on node \"crc\" DevicePath \"\"" Dec 02 00:18:46 crc kubenswrapper[4856]: I1202 00:18:46.800221 4856 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1fd9f804-c0c5-4f64-8328-30f1e6b25a98-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 00:18:47 crc kubenswrapper[4856]: I1202 00:18:47.460764 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm" event={"ID":"1fd9f804-c0c5-4f64-8328-30f1e6b25a98","Type":"ContainerDied","Data":"7f5e3b539c5c6d05f78607ade3f7824d51d855d7eb4f6d031b8261e5307c421e"} Dec 02 00:18:47 crc kubenswrapper[4856]: I1202 00:18:47.460819 4856 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7f5e3b539c5c6d05f78607ade3f7824d51d855d7eb4f6d031b8261e5307c421e" Dec 02 00:18:47 crc kubenswrapper[4856]: I1202 00:18:47.460887 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm" Dec 02 00:18:47 crc kubenswrapper[4856]: I1202 00:18:47.504292 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf"] Dec 02 00:18:47 crc kubenswrapper[4856]: E1202 00:18:47.504679 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fd9f804-c0c5-4f64-8328-30f1e6b25a98" containerName="extract" Dec 02 00:18:47 crc kubenswrapper[4856]: I1202 00:18:47.504706 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fd9f804-c0c5-4f64-8328-30f1e6b25a98" containerName="extract" Dec 02 00:18:47 crc kubenswrapper[4856]: E1202 00:18:47.504743 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fd9f804-c0c5-4f64-8328-30f1e6b25a98" containerName="pull" Dec 02 00:18:47 crc kubenswrapper[4856]: I1202 00:18:47.504760 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fd9f804-c0c5-4f64-8328-30f1e6b25a98" containerName="pull" Dec 02 00:18:47 crc kubenswrapper[4856]: E1202 00:18:47.504797 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fd9f804-c0c5-4f64-8328-30f1e6b25a98" containerName="util" Dec 02 00:18:47 crc kubenswrapper[4856]: I1202 00:18:47.504813 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fd9f804-c0c5-4f64-8328-30f1e6b25a98" containerName="util" Dec 02 00:18:47 crc kubenswrapper[4856]: I1202 00:18:47.505032 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fd9f804-c0c5-4f64-8328-30f1e6b25a98" containerName="extract" Dec 02 00:18:47 crc kubenswrapper[4856]: I1202 00:18:47.506254 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf" Dec 02 00:18:47 crc kubenswrapper[4856]: I1202 00:18:47.508034 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 02 00:18:47 crc kubenswrapper[4856]: I1202 00:18:47.510039 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf"] Dec 02 00:18:47 crc kubenswrapper[4856]: I1202 00:18:47.707180 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dfd38b07-2dea-4521-a264-3bf95800aad7-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf\" (UID: \"dfd38b07-2dea-4521-a264-3bf95800aad7\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf" Dec 02 00:18:47 crc kubenswrapper[4856]: I1202 00:18:47.707269 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dj78g\" (UniqueName: \"kubernetes.io/projected/dfd38b07-2dea-4521-a264-3bf95800aad7-kube-api-access-dj78g\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf\" (UID: \"dfd38b07-2dea-4521-a264-3bf95800aad7\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf" Dec 02 00:18:47 crc kubenswrapper[4856]: I1202 00:18:47.707340 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dfd38b07-2dea-4521-a264-3bf95800aad7-bundle\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf\" (UID: \"dfd38b07-2dea-4521-a264-3bf95800aad7\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf" Dec 02 00:18:47 crc kubenswrapper[4856]: I1202 00:18:47.807957 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dfd38b07-2dea-4521-a264-3bf95800aad7-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf\" (UID: \"dfd38b07-2dea-4521-a264-3bf95800aad7\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf" Dec 02 00:18:47 crc kubenswrapper[4856]: I1202 00:18:47.808029 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dj78g\" (UniqueName: \"kubernetes.io/projected/dfd38b07-2dea-4521-a264-3bf95800aad7-kube-api-access-dj78g\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf\" (UID: \"dfd38b07-2dea-4521-a264-3bf95800aad7\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf" Dec 02 00:18:47 crc kubenswrapper[4856]: I1202 00:18:47.808075 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dfd38b07-2dea-4521-a264-3bf95800aad7-bundle\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf\" (UID: \"dfd38b07-2dea-4521-a264-3bf95800aad7\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf" Dec 02 00:18:47 crc kubenswrapper[4856]: I1202 00:18:47.808536 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/dfd38b07-2dea-4521-a264-3bf95800aad7-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf\" (UID: \"dfd38b07-2dea-4521-a264-3bf95800aad7\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf" Dec 02 00:18:47 crc kubenswrapper[4856]: I1202 00:18:47.808560 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dfd38b07-2dea-4521-a264-3bf95800aad7-bundle\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf\" (UID: \"dfd38b07-2dea-4521-a264-3bf95800aad7\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf" Dec 02 00:18:47 crc kubenswrapper[4856]: I1202 00:18:47.825965 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dj78g\" (UniqueName: \"kubernetes.io/projected/dfd38b07-2dea-4521-a264-3bf95800aad7-kube-api-access-dj78g\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf\" (UID: \"dfd38b07-2dea-4521-a264-3bf95800aad7\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf" Dec 02 00:18:48 crc kubenswrapper[4856]: I1202 00:18:48.121565 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf" Dec 02 00:18:48 crc kubenswrapper[4856]: I1202 00:18:48.531302 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf"] Dec 02 00:18:49 crc kubenswrapper[4856]: I1202 00:18:49.475386 4856 generic.go:334] "Generic (PLEG): container finished" podID="dfd38b07-2dea-4521-a264-3bf95800aad7" containerID="77bf01409648a68cd5066a2201da0c78bc0772e496a46a72a3b6145a38d9b539" exitCode=0 Dec 02 00:18:49 crc kubenswrapper[4856]: I1202 00:18:49.475464 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf" event={"ID":"dfd38b07-2dea-4521-a264-3bf95800aad7","Type":"ContainerDied","Data":"77bf01409648a68cd5066a2201da0c78bc0772e496a46a72a3b6145a38d9b539"} Dec 02 00:18:49 crc kubenswrapper[4856]: I1202 00:18:49.475726 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf" event={"ID":"dfd38b07-2dea-4521-a264-3bf95800aad7","Type":"ContainerStarted","Data":"9e2b5858a49c93bb6a8b01e4961ae087990598ad597bcf0c39ab5bc49339057a"} Dec 02 00:18:51 crc kubenswrapper[4856]: I1202 00:18:51.372117 4856 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 02 00:18:51 crc kubenswrapper[4856]: I1202 00:18:51.463705 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-7f76l"] Dec 02 00:18:51 crc kubenswrapper[4856]: I1202 00:18:51.464887 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7f76l" Dec 02 00:18:51 crc kubenswrapper[4856]: I1202 00:18:51.482374 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7f76l"] Dec 02 00:18:51 crc kubenswrapper[4856]: I1202 00:18:51.513759 4856 generic.go:334] "Generic (PLEG): container finished" podID="dfd38b07-2dea-4521-a264-3bf95800aad7" containerID="d45129354871fb2b0b18f81df4c1be3b7c7bb7333b2d91690ddc5e66c0430b7f" exitCode=0 Dec 02 00:18:51 crc kubenswrapper[4856]: I1202 00:18:51.513822 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf" event={"ID":"dfd38b07-2dea-4521-a264-3bf95800aad7","Type":"ContainerDied","Data":"d45129354871fb2b0b18f81df4c1be3b7c7bb7333b2d91690ddc5e66c0430b7f"} Dec 02 00:18:51 crc kubenswrapper[4856]: I1202 00:18:51.574032 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2de9e41d-ebac-4023-a405-36dca3999165-catalog-content\") pod \"certified-operators-7f76l\" (UID: \"2de9e41d-ebac-4023-a405-36dca3999165\") " pod="openshift-marketplace/certified-operators-7f76l" Dec 02 00:18:51 crc kubenswrapper[4856]: I1202 00:18:51.574090 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2de9e41d-ebac-4023-a405-36dca3999165-utilities\") pod \"certified-operators-7f76l\" (UID: \"2de9e41d-ebac-4023-a405-36dca3999165\") " pod="openshift-marketplace/certified-operators-7f76l" Dec 02 00:18:51 crc kubenswrapper[4856]: I1202 00:18:51.574127 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mdl4\" (UniqueName: \"kubernetes.io/projected/2de9e41d-ebac-4023-a405-36dca3999165-kube-api-access-2mdl4\") pod \"certified-operators-7f76l\" (UID: \"2de9e41d-ebac-4023-a405-36dca3999165\") " pod="openshift-marketplace/certified-operators-7f76l" Dec 02 00:18:51 crc kubenswrapper[4856]: I1202 00:18:51.675227 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mdl4\" (UniqueName: \"kubernetes.io/projected/2de9e41d-ebac-4023-a405-36dca3999165-kube-api-access-2mdl4\") pod \"certified-operators-7f76l\" (UID: \"2de9e41d-ebac-4023-a405-36dca3999165\") " pod="openshift-marketplace/certified-operators-7f76l" Dec 02 00:18:51 crc kubenswrapper[4856]: I1202 00:18:51.675308 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2de9e41d-ebac-4023-a405-36dca3999165-catalog-content\") pod \"certified-operators-7f76l\" (UID: \"2de9e41d-ebac-4023-a405-36dca3999165\") " pod="openshift-marketplace/certified-operators-7f76l" Dec 02 00:18:51 crc kubenswrapper[4856]: I1202 00:18:51.675336 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2de9e41d-ebac-4023-a405-36dca3999165-utilities\") pod \"certified-operators-7f76l\" (UID: \"2de9e41d-ebac-4023-a405-36dca3999165\") " pod="openshift-marketplace/certified-operators-7f76l" Dec 02 00:18:51 crc kubenswrapper[4856]: I1202 00:18:51.675844 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2de9e41d-ebac-4023-a405-36dca3999165-utilities\") pod 
\"certified-operators-7f76l\" (UID: \"2de9e41d-ebac-4023-a405-36dca3999165\") " pod="openshift-marketplace/certified-operators-7f76l" Dec 02 00:18:51 crc kubenswrapper[4856]: I1202 00:18:51.675894 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2de9e41d-ebac-4023-a405-36dca3999165-catalog-content\") pod \"certified-operators-7f76l\" (UID: \"2de9e41d-ebac-4023-a405-36dca3999165\") " pod="openshift-marketplace/certified-operators-7f76l" Dec 02 00:18:51 crc kubenswrapper[4856]: I1202 00:18:51.713702 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mdl4\" (UniqueName: \"kubernetes.io/projected/2de9e41d-ebac-4023-a405-36dca3999165-kube-api-access-2mdl4\") pod \"certified-operators-7f76l\" (UID: \"2de9e41d-ebac-4023-a405-36dca3999165\") " pod="openshift-marketplace/certified-operators-7f76l" Dec 02 00:18:51 crc kubenswrapper[4856]: I1202 00:18:51.779617 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7f76l" Dec 02 00:18:52 crc kubenswrapper[4856]: I1202 00:18:52.051043 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7f76l"] Dec 02 00:18:52 crc kubenswrapper[4856]: I1202 00:18:52.521371 4856 generic.go:334] "Generic (PLEG): container finished" podID="2de9e41d-ebac-4023-a405-36dca3999165" containerID="fed0f4c869cf934d974d79a1b4d93661c1612daae833c4072e404b8929671ef7" exitCode=0 Dec 02 00:18:52 crc kubenswrapper[4856]: I1202 00:18:52.521463 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7f76l" event={"ID":"2de9e41d-ebac-4023-a405-36dca3999165","Type":"ContainerDied","Data":"fed0f4c869cf934d974d79a1b4d93661c1612daae833c4072e404b8929671ef7"} Dec 02 00:18:52 crc kubenswrapper[4856]: I1202 00:18:52.521780 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7f76l" event={"ID":"2de9e41d-ebac-4023-a405-36dca3999165","Type":"ContainerStarted","Data":"05d241f827d8cc383515d298f3c1a777ca181d7d067628bce36321f7777dd469"} Dec 02 00:18:52 crc kubenswrapper[4856]: I1202 00:18:52.530935 4856 generic.go:334] "Generic (PLEG): container finished" podID="dfd38b07-2dea-4521-a264-3bf95800aad7" containerID="8782c663c4f8c71dae17abb453f9111569062f8c8bc3b6fcade5a38827e6be5c" exitCode=0 Dec 02 00:18:52 crc kubenswrapper[4856]: I1202 00:18:52.531066 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf" event={"ID":"dfd38b07-2dea-4521-a264-3bf95800aad7","Type":"ContainerDied","Data":"8782c663c4f8c71dae17abb453f9111569062f8c8bc3b6fcade5a38827e6be5c"} Dec 02 00:18:53 crc kubenswrapper[4856]: I1202 00:18:53.478913 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv"] Dec 02 00:18:53 crc kubenswrapper[4856]: I1202 00:18:53.480143 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv" Dec 02 00:18:53 crc kubenswrapper[4856]: I1202 00:18:53.494731 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv\" (UID: \"fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv" Dec 02 00:18:53 crc kubenswrapper[4856]: I1202 00:18:53.494783 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv\" (UID: \"fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv" Dec 02 00:18:53 crc kubenswrapper[4856]: I1202 00:18:53.494858 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6765\" (UniqueName: \"kubernetes.io/projected/fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d-kube-api-access-l6765\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv\" (UID: \"fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv" Dec 02 00:18:53 crc kubenswrapper[4856]: I1202 00:18:53.524991 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv"] Dec 02 00:18:53 crc kubenswrapper[4856]: I1202 00:18:53.536744 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7f76l" event={"ID":"2de9e41d-ebac-4023-a405-36dca3999165","Type":"ContainerStarted","Data":"94491e7222cc33572c7f3f1382d72b031c41da6295698d07d23e1e450bffa493"} Dec 02 00:18:53 crc kubenswrapper[4856]: I1202 00:18:53.595635 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv\" (UID: \"fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv" Dec 02 00:18:53 crc kubenswrapper[4856]: I1202 00:18:53.595673 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv\" (UID: \"fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv" Dec 02 00:18:53 crc kubenswrapper[4856]: I1202 00:18:53.595693 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6765\" (UniqueName: \"kubernetes.io/projected/fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d-kube-api-access-l6765\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv\" (UID: \"fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv" Dec 02 00:18:53 crc kubenswrapper[4856]: I1202 00:18:53.596052 
4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv\" (UID: \"fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv" Dec 02 00:18:53 crc kubenswrapper[4856]: I1202 00:18:53.596146 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv\" (UID: \"fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv" Dec 02 00:18:53 crc kubenswrapper[4856]: I1202 00:18:53.668624 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6765\" (UniqueName: \"kubernetes.io/projected/fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d-kube-api-access-l6765\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv\" (UID: \"fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv" Dec 02 00:18:53 crc kubenswrapper[4856]: I1202 00:18:53.797616 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv" Dec 02 00:18:53 crc kubenswrapper[4856]: I1202 00:18:53.917111 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf" Dec 02 00:18:54 crc kubenswrapper[4856]: I1202 00:18:54.104806 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dfd38b07-2dea-4521-a264-3bf95800aad7-util\") pod \"dfd38b07-2dea-4521-a264-3bf95800aad7\" (UID: \"dfd38b07-2dea-4521-a264-3bf95800aad7\") " Dec 02 00:18:54 crc kubenswrapper[4856]: I1202 00:18:54.104898 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dj78g\" (UniqueName: \"kubernetes.io/projected/dfd38b07-2dea-4521-a264-3bf95800aad7-kube-api-access-dj78g\") pod \"dfd38b07-2dea-4521-a264-3bf95800aad7\" (UID: \"dfd38b07-2dea-4521-a264-3bf95800aad7\") " Dec 02 00:18:54 crc kubenswrapper[4856]: I1202 00:18:54.104939 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dfd38b07-2dea-4521-a264-3bf95800aad7-bundle\") pod \"dfd38b07-2dea-4521-a264-3bf95800aad7\" (UID: \"dfd38b07-2dea-4521-a264-3bf95800aad7\") " Dec 02 00:18:54 crc kubenswrapper[4856]: I1202 00:18:54.105944 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfd38b07-2dea-4521-a264-3bf95800aad7-bundle" (OuterVolumeSpecName: "bundle") pod "dfd38b07-2dea-4521-a264-3bf95800aad7" (UID: "dfd38b07-2dea-4521-a264-3bf95800aad7"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:18:54 crc kubenswrapper[4856]: I1202 00:18:54.112804 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfd38b07-2dea-4521-a264-3bf95800aad7-kube-api-access-dj78g" (OuterVolumeSpecName: "kube-api-access-dj78g") pod "dfd38b07-2dea-4521-a264-3bf95800aad7" (UID: "dfd38b07-2dea-4521-a264-3bf95800aad7"). InnerVolumeSpecName "kube-api-access-dj78g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:18:54 crc kubenswrapper[4856]: I1202 00:18:54.206366 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dj78g\" (UniqueName: \"kubernetes.io/projected/dfd38b07-2dea-4521-a264-3bf95800aad7-kube-api-access-dj78g\") on node \"crc\" DevicePath \"\"" Dec 02 00:18:54 crc kubenswrapper[4856]: I1202 00:18:54.206645 4856 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dfd38b07-2dea-4521-a264-3bf95800aad7-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 00:18:54 crc kubenswrapper[4856]: I1202 00:18:54.305636 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv"] Dec 02 00:18:54 crc kubenswrapper[4856]: I1202 00:18:54.549617 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf" event={"ID":"dfd38b07-2dea-4521-a264-3bf95800aad7","Type":"ContainerDied","Data":"9e2b5858a49c93bb6a8b01e4961ae087990598ad597bcf0c39ab5bc49339057a"} Dec 02 00:18:54 crc kubenswrapper[4856]: I1202 00:18:54.549652 4856 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e2b5858a49c93bb6a8b01e4961ae087990598ad597bcf0c39ab5bc49339057a" Dec 02 00:18:54 crc kubenswrapper[4856]: I1202 00:18:54.549711 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf" Dec 02 00:18:54 crc kubenswrapper[4856]: I1202 00:18:54.551302 4856 generic.go:334] "Generic (PLEG): container finished" podID="2de9e41d-ebac-4023-a405-36dca3999165" containerID="94491e7222cc33572c7f3f1382d72b031c41da6295698d07d23e1e450bffa493" exitCode=0 Dec 02 00:18:54 crc kubenswrapper[4856]: I1202 00:18:54.551342 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7f76l" event={"ID":"2de9e41d-ebac-4023-a405-36dca3999165","Type":"ContainerDied","Data":"94491e7222cc33572c7f3f1382d72b031c41da6295698d07d23e1e450bffa493"} Dec 02 00:18:54 crc kubenswrapper[4856]: I1202 00:18:54.554910 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv" event={"ID":"fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d","Type":"ContainerStarted","Data":"1d1e20fba6feefa1314c113cb82c44fd9366b8bab465a3c1247f154a05285cb3"} Dec 02 00:18:54 crc kubenswrapper[4856]: I1202 00:18:54.563298 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfd38b07-2dea-4521-a264-3bf95800aad7-util" (OuterVolumeSpecName: "util") pod "dfd38b07-2dea-4521-a264-3bf95800aad7" (UID: "dfd38b07-2dea-4521-a264-3bf95800aad7"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:18:54 crc kubenswrapper[4856]: I1202 00:18:54.611981 4856 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dfd38b07-2dea-4521-a264-3bf95800aad7-util\") on node \"crc\" DevicePath \"\"" Dec 02 00:18:55 crc kubenswrapper[4856]: I1202 00:18:55.561974 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7f76l" event={"ID":"2de9e41d-ebac-4023-a405-36dca3999165","Type":"ContainerStarted","Data":"972dd81215920644368be3e6963bf61da54e5b71963ddb65115b6929f09e40d2"} Dec 02 00:18:55 crc kubenswrapper[4856]: I1202 00:18:55.563655 4856 generic.go:334] "Generic (PLEG): container finished" podID="fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d" containerID="e071ec4329759b481fc7ee9b7f96d6f0dc62cca31a0eb5172a131ccaeefdec5b" exitCode=0 Dec 02 00:18:55 crc kubenswrapper[4856]: I1202 00:18:55.563683 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv" event={"ID":"fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d","Type":"ContainerDied","Data":"e071ec4329759b481fc7ee9b7f96d6f0dc62cca31a0eb5172a131ccaeefdec5b"} Dec 02 00:18:55 crc kubenswrapper[4856]: I1202 00:18:55.584343 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7f76l" podStartSLOduration=2.067092754 podStartE2EDuration="4.584329653s" podCreationTimestamp="2025-12-02 00:18:51 +0000 UTC" firstStartedPulling="2025-12-02 00:18:52.525852366 +0000 UTC m=+759.552220370" lastFinishedPulling="2025-12-02 00:18:55.043089275 +0000 UTC m=+762.069457269" observedRunningTime="2025-12-02 00:18:55.582800386 +0000 UTC m=+762.609168390" watchObservedRunningTime="2025-12-02 00:18:55.584329653 +0000 UTC m=+762.610697657" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.057785 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gqgwn"] Dec 02 00:18:58 crc kubenswrapper[4856]: E1202 00:18:58.058303 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfd38b07-2dea-4521-a264-3bf95800aad7" containerName="pull" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.058318 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfd38b07-2dea-4521-a264-3bf95800aad7" containerName="pull" Dec 02 00:18:58 crc kubenswrapper[4856]: E1202 00:18:58.058333 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfd38b07-2dea-4521-a264-3bf95800aad7" containerName="extract" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.058340 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfd38b07-2dea-4521-a264-3bf95800aad7" containerName="extract" Dec 02 00:18:58 crc kubenswrapper[4856]: E1202 00:18:58.058352 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfd38b07-2dea-4521-a264-3bf95800aad7" containerName="util" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.058358 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfd38b07-2dea-4521-a264-3bf95800aad7" containerName="util" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.058451 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfd38b07-2dea-4521-a264-3bf95800aad7" containerName="extract" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.059143 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gqgwn" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.074077 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gqgwn"] Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.258963 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75-utilities\") pod \"redhat-operators-gqgwn\" (UID: \"06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75\") " pod="openshift-marketplace/redhat-operators-gqgwn" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.259045 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75-catalog-content\") pod \"redhat-operators-gqgwn\" (UID: \"06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75\") " pod="openshift-marketplace/redhat-operators-gqgwn" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.259069 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdmxc\" (UniqueName: \"kubernetes.io/projected/06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75-kube-api-access-wdmxc\") pod \"redhat-operators-gqgwn\" (UID: \"06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75\") " pod="openshift-marketplace/redhat-operators-gqgwn" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.360004 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75-catalog-content\") pod \"redhat-operators-gqgwn\" (UID: \"06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75\") " pod="openshift-marketplace/redhat-operators-gqgwn" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.360067 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdmxc\" (UniqueName: \"kubernetes.io/projected/06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75-kube-api-access-wdmxc\") pod \"redhat-operators-gqgwn\" (UID: \"06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75\") " pod="openshift-marketplace/redhat-operators-gqgwn" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.360119 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75-utilities\") pod \"redhat-operators-gqgwn\" (UID: \"06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75\") " pod="openshift-marketplace/redhat-operators-gqgwn" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.360624 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75-utilities\") pod \"redhat-operators-gqgwn\" (UID: \"06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75\") " pod="openshift-marketplace/redhat-operators-gqgwn" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.360732 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75-catalog-content\") pod \"redhat-operators-gqgwn\" (UID: \"06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75\") " pod="openshift-marketplace/redhat-operators-gqgwn" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.391934 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-wdmxc\" (UniqueName: \"kubernetes.io/projected/06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75-kube-api-access-wdmxc\") pod \"redhat-operators-gqgwn\" (UID: \"06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75\") " pod="openshift-marketplace/redhat-operators-gqgwn" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.439241 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-j272l"] Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.440105 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j272l" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.442503 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.442516 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-xprth" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.446180 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.467110 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-j272l"] Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.557563 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-6hkks"] Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.558693 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-6hkks" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.562342 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.562748 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9h6l\" (UniqueName: \"kubernetes.io/projected/c56dba5d-a93c-45fb-8495-846b9098c7d1-kube-api-access-v9h6l\") pod \"obo-prometheus-operator-668cf9dfbb-j272l\" (UID: \"c56dba5d-a93c-45fb-8495-846b9098c7d1\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j272l" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.562766 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-fq5th" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.564906 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-v5dqs"] Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.565710 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-v5dqs" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.572492 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-6hkks"] Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.591629 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-v5dqs"] Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.664376 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/edfb110d-4069-42b8-a8bf-3cf5a74ba610-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-f5c4ddff-6hkks\" (UID: \"edfb110d-4069-42b8-a8bf-3cf5a74ba610\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-6hkks" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.664444 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6906a6f6-08a2-4a23-b609-4b3f37976695-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-f5c4ddff-v5dqs\" (UID: \"6906a6f6-08a2-4a23-b609-4b3f37976695\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-v5dqs" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.664469 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/edfb110d-4069-42b8-a8bf-3cf5a74ba610-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-f5c4ddff-6hkks\" (UID: \"edfb110d-4069-42b8-a8bf-3cf5a74ba610\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-6hkks" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.664507 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6906a6f6-08a2-4a23-b609-4b3f37976695-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-f5c4ddff-v5dqs\" (UID: \"6906a6f6-08a2-4a23-b609-4b3f37976695\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-v5dqs" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.664552 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9h6l\" (UniqueName: \"kubernetes.io/projected/c56dba5d-a93c-45fb-8495-846b9098c7d1-kube-api-access-v9h6l\") pod \"obo-prometheus-operator-668cf9dfbb-j272l\" (UID: \"c56dba5d-a93c-45fb-8495-846b9098c7d1\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j272l" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.682867 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gqgwn" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.682959 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9h6l\" (UniqueName: \"kubernetes.io/projected/c56dba5d-a93c-45fb-8495-846b9098c7d1-kube-api-access-v9h6l\") pod \"obo-prometheus-operator-668cf9dfbb-j272l\" (UID: \"c56dba5d-a93c-45fb-8495-846b9098c7d1\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j272l" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.756000 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j272l" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.766212 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6906a6f6-08a2-4a23-b609-4b3f37976695-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-f5c4ddff-v5dqs\" (UID: \"6906a6f6-08a2-4a23-b609-4b3f37976695\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-v5dqs" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.766288 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/edfb110d-4069-42b8-a8bf-3cf5a74ba610-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-f5c4ddff-6hkks\" (UID: \"edfb110d-4069-42b8-a8bf-3cf5a74ba610\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-6hkks" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.766331 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6906a6f6-08a2-4a23-b609-4b3f37976695-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-f5c4ddff-v5dqs\" (UID: \"6906a6f6-08a2-4a23-b609-4b3f37976695\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-v5dqs" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.766377 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/edfb110d-4069-42b8-a8bf-3cf5a74ba610-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-f5c4ddff-6hkks\" (UID: \"edfb110d-4069-42b8-a8bf-3cf5a74ba610\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-6hkks" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.770641 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6906a6f6-08a2-4a23-b609-4b3f37976695-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-f5c4ddff-v5dqs\" (UID: \"6906a6f6-08a2-4a23-b609-4b3f37976695\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-v5dqs" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.771208 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/edfb110d-4069-42b8-a8bf-3cf5a74ba610-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-f5c4ddff-6hkks\" (UID: \"edfb110d-4069-42b8-a8bf-3cf5a74ba610\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-6hkks" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.772214 4856 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-operators/observability-operator-d8bb48f5d-2tlhq"] Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.773704 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-2tlhq" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.774138 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/edfb110d-4069-42b8-a8bf-3cf5a74ba610-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-f5c4ddff-6hkks\" (UID: \"edfb110d-4069-42b8-a8bf-3cf5a74ba610\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-6hkks" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.774378 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6906a6f6-08a2-4a23-b609-4b3f37976695-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-f5c4ddff-v5dqs\" (UID: \"6906a6f6-08a2-4a23-b609-4b3f37976695\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-v5dqs" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.776385 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-b7rlk" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.776660 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.792301 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-2tlhq"] Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.867623 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hjkl\" (UniqueName: \"kubernetes.io/projected/35c7073d-b173-4200-a0fe-5df05f791e68-kube-api-access-7hjkl\") pod \"observability-operator-d8bb48f5d-2tlhq\" (UID: \"35c7073d-b173-4200-a0fe-5df05f791e68\") " pod="openshift-operators/observability-operator-d8bb48f5d-2tlhq" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.867698 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/35c7073d-b173-4200-a0fe-5df05f791e68-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-2tlhq\" (UID: \"35c7073d-b173-4200-a0fe-5df05f791e68\") " pod="openshift-operators/observability-operator-d8bb48f5d-2tlhq" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.880758 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-6hkks" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.885775 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-wz88m"] Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.886412 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-wz88m" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.890088 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-rvv7w" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.908985 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-v5dqs" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.909983 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-wz88m"] Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.968836 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/35c7073d-b173-4200-a0fe-5df05f791e68-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-2tlhq\" (UID: \"35c7073d-b173-4200-a0fe-5df05f791e68\") " pod="openshift-operators/observability-operator-d8bb48f5d-2tlhq" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.968908 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nzgf\" (UniqueName: \"kubernetes.io/projected/909dd6ee-af48-452f-8de5-73fe740e006b-kube-api-access-5nzgf\") pod \"perses-operator-5446b9c989-wz88m\" (UID: \"909dd6ee-af48-452f-8de5-73fe740e006b\") " pod="openshift-operators/perses-operator-5446b9c989-wz88m" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.968952 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/909dd6ee-af48-452f-8de5-73fe740e006b-openshift-service-ca\") pod \"perses-operator-5446b9c989-wz88m\" (UID: \"909dd6ee-af48-452f-8de5-73fe740e006b\") " pod="openshift-operators/perses-operator-5446b9c989-wz88m" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.968975 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hjkl\" (UniqueName: \"kubernetes.io/projected/35c7073d-b173-4200-a0fe-5df05f791e68-kube-api-access-7hjkl\") pod \"observability-operator-d8bb48f5d-2tlhq\" (UID: \"35c7073d-b173-4200-a0fe-5df05f791e68\") " pod="openshift-operators/observability-operator-d8bb48f5d-2tlhq" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.986276 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/35c7073d-b173-4200-a0fe-5df05f791e68-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-2tlhq\" (UID: \"35c7073d-b173-4200-a0fe-5df05f791e68\") " pod="openshift-operators/observability-operator-d8bb48f5d-2tlhq" Dec 02 00:18:58 crc kubenswrapper[4856]: I1202 00:18:58.987451 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hjkl\" (UniqueName: \"kubernetes.io/projected/35c7073d-b173-4200-a0fe-5df05f791e68-kube-api-access-7hjkl\") pod \"observability-operator-d8bb48f5d-2tlhq\" (UID: \"35c7073d-b173-4200-a0fe-5df05f791e68\") " pod="openshift-operators/observability-operator-d8bb48f5d-2tlhq" Dec 02 00:18:59 crc kubenswrapper[4856]: I1202 00:18:59.070267 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/909dd6ee-af48-452f-8de5-73fe740e006b-openshift-service-ca\") pod \"perses-operator-5446b9c989-wz88m\" (UID: \"909dd6ee-af48-452f-8de5-73fe740e006b\") " pod="openshift-operators/perses-operator-5446b9c989-wz88m" Dec 02 00:18:59 crc kubenswrapper[4856]: I1202 00:18:59.070712 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nzgf\" (UniqueName: 
\"kubernetes.io/projected/909dd6ee-af48-452f-8de5-73fe740e006b-kube-api-access-5nzgf\") pod \"perses-operator-5446b9c989-wz88m\" (UID: \"909dd6ee-af48-452f-8de5-73fe740e006b\") " pod="openshift-operators/perses-operator-5446b9c989-wz88m" Dec 02 00:18:59 crc kubenswrapper[4856]: I1202 00:18:59.071317 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/909dd6ee-af48-452f-8de5-73fe740e006b-openshift-service-ca\") pod \"perses-operator-5446b9c989-wz88m\" (UID: \"909dd6ee-af48-452f-8de5-73fe740e006b\") " pod="openshift-operators/perses-operator-5446b9c989-wz88m" Dec 02 00:18:59 crc kubenswrapper[4856]: I1202 00:18:59.103367 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nzgf\" (UniqueName: \"kubernetes.io/projected/909dd6ee-af48-452f-8de5-73fe740e006b-kube-api-access-5nzgf\") pod \"perses-operator-5446b9c989-wz88m\" (UID: \"909dd6ee-af48-452f-8de5-73fe740e006b\") " pod="openshift-operators/perses-operator-5446b9c989-wz88m" Dec 02 00:18:59 crc kubenswrapper[4856]: I1202 00:18:59.141931 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-2tlhq" Dec 02 00:18:59 crc kubenswrapper[4856]: I1202 00:18:59.204931 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-wz88m" Dec 02 00:19:00 crc kubenswrapper[4856]: I1202 00:19:00.980960 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gqgwn"] Dec 02 00:19:00 crc kubenswrapper[4856]: W1202 00:19:00.985211 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod06adfe3c_90b0_4f00_9f2d_a9af5b0c5e75.slice/crio-46482f3ddeebe8b992ed6c278095c17e47b15f1ffa05eb35a10a7dae38efb3a5 WatchSource:0}: Error finding container 46482f3ddeebe8b992ed6c278095c17e47b15f1ffa05eb35a10a7dae38efb3a5: Status 404 returned error can't find the container with id 46482f3ddeebe8b992ed6c278095c17e47b15f1ffa05eb35a10a7dae38efb3a5 Dec 02 00:19:00 crc kubenswrapper[4856]: I1202 00:19:00.998904 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-wz88m"] Dec 02 00:19:01 crc kubenswrapper[4856]: I1202 00:19:01.022402 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-2tlhq"] Dec 02 00:19:01 crc kubenswrapper[4856]: I1202 00:19:01.027247 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-j272l"] Dec 02 00:19:01 crc kubenswrapper[4856]: I1202 00:19:01.041536 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-v5dqs"] Dec 02 00:19:01 crc kubenswrapper[4856]: I1202 00:19:01.213071 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-6hkks"] Dec 02 00:19:01 crc kubenswrapper[4856]: W1202 00:19:01.219461 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podedfb110d_4069_42b8_a8bf_3cf5a74ba610.slice/crio-197e60f1d9f150ae03abc55b1b09c58b3b29eb4835ae147f44bf76262247a6d7 WatchSource:0}: Error finding container 197e60f1d9f150ae03abc55b1b09c58b3b29eb4835ae147f44bf76262247a6d7: Status 404 
returned error can't find the container with id 197e60f1d9f150ae03abc55b1b09c58b3b29eb4835ae147f44bf76262247a6d7 Dec 02 00:19:01 crc kubenswrapper[4856]: I1202 00:19:01.613384 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-v5dqs" event={"ID":"6906a6f6-08a2-4a23-b609-4b3f37976695","Type":"ContainerStarted","Data":"18a967d74cb6439b66e2eab0190d7386de6c433e48b7e776aa51739698ca4b34"} Dec 02 00:19:01 crc kubenswrapper[4856]: I1202 00:19:01.614988 4856 generic.go:334] "Generic (PLEG): container finished" podID="06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75" containerID="3531b36acf35173f87de36997848d41225864dfbc2d592eda0b1923afe704d97" exitCode=0 Dec 02 00:19:01 crc kubenswrapper[4856]: I1202 00:19:01.615071 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gqgwn" event={"ID":"06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75","Type":"ContainerDied","Data":"3531b36acf35173f87de36997848d41225864dfbc2d592eda0b1923afe704d97"} Dec 02 00:19:01 crc kubenswrapper[4856]: I1202 00:19:01.615099 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gqgwn" event={"ID":"06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75","Type":"ContainerStarted","Data":"46482f3ddeebe8b992ed6c278095c17e47b15f1ffa05eb35a10a7dae38efb3a5"} Dec 02 00:19:01 crc kubenswrapper[4856]: I1202 00:19:01.616201 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-6hkks" event={"ID":"edfb110d-4069-42b8-a8bf-3cf5a74ba610","Type":"ContainerStarted","Data":"197e60f1d9f150ae03abc55b1b09c58b3b29eb4835ae147f44bf76262247a6d7"} Dec 02 00:19:01 crc kubenswrapper[4856]: I1202 00:19:01.617246 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j272l" event={"ID":"c56dba5d-a93c-45fb-8495-846b9098c7d1","Type":"ContainerStarted","Data":"bf5d33a9dc1ce0a697565b48c16e5af49efd6e71bb920d7b39f7df258a13b4d1"} Dec 02 00:19:01 crc kubenswrapper[4856]: I1202 00:19:01.620935 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-2tlhq" event={"ID":"35c7073d-b173-4200-a0fe-5df05f791e68","Type":"ContainerStarted","Data":"7603c9f0e28846a9e9800ef54777eef68ef31d7e9e2c66ecabbfaa9cf926108a"} Dec 02 00:19:01 crc kubenswrapper[4856]: I1202 00:19:01.623629 4856 generic.go:334] "Generic (PLEG): container finished" podID="fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d" containerID="33ffb44b26130ddc5a92277960b2e9f0e5705fe6e92e985c1a759c47afb01297" exitCode=0 Dec 02 00:19:01 crc kubenswrapper[4856]: I1202 00:19:01.623801 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv" event={"ID":"fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d","Type":"ContainerDied","Data":"33ffb44b26130ddc5a92277960b2e9f0e5705fe6e92e985c1a759c47afb01297"} Dec 02 00:19:01 crc kubenswrapper[4856]: I1202 00:19:01.624984 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-wz88m" event={"ID":"909dd6ee-af48-452f-8de5-73fe740e006b","Type":"ContainerStarted","Data":"00d9b903e81dd9ea0796f68ea910e6bc644f4d0ab1c74b87090d9bf182b58f24"} Dec 02 00:19:01 crc kubenswrapper[4856]: I1202 00:19:01.779925 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7f76l" Dec 02 00:19:01 crc 
kubenswrapper[4856]: I1202 00:19:01.780759 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7f76l" Dec 02 00:19:01 crc kubenswrapper[4856]: I1202 00:19:01.824401 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-7f76l" Dec 02 00:19:02 crc kubenswrapper[4856]: I1202 00:19:02.635081 4856 generic.go:334] "Generic (PLEG): container finished" podID="fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d" containerID="09063a1c07c2ca751658d2de39970f978ce3bf5671d1e0f022435e1258b4ff05" exitCode=0 Dec 02 00:19:02 crc kubenswrapper[4856]: I1202 00:19:02.635155 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv" event={"ID":"fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d","Type":"ContainerDied","Data":"09063a1c07c2ca751658d2de39970f978ce3bf5671d1e0f022435e1258b4ff05"} Dec 02 00:19:02 crc kubenswrapper[4856]: I1202 00:19:02.713450 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7f76l" Dec 02 00:19:02 crc kubenswrapper[4856]: I1202 00:19:02.864867 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/elastic-operator-96d9b59b6-rtjhh"] Dec 02 00:19:02 crc kubenswrapper[4856]: I1202 00:19:02.865798 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elastic-operator-96d9b59b6-rtjhh" Dec 02 00:19:02 crc kubenswrapper[4856]: I1202 00:19:02.876105 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"openshift-service-ca.crt" Dec 02 00:19:02 crc kubenswrapper[4856]: I1202 00:19:02.876350 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elastic-operator-service-cert" Dec 02 00:19:02 crc kubenswrapper[4856]: I1202 00:19:02.876454 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"kube-root-ca.crt" Dec 02 00:19:02 crc kubenswrapper[4856]: I1202 00:19:02.876561 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elastic-operator-dockercfg-x5zsw" Dec 02 00:19:02 crc kubenswrapper[4856]: I1202 00:19:02.885753 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elastic-operator-96d9b59b6-rtjhh"] Dec 02 00:19:02 crc kubenswrapper[4856]: I1202 00:19:02.933686 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f5aeab99-08c9-480d-bab4-8a94933ae1df-webhook-cert\") pod \"elastic-operator-96d9b59b6-rtjhh\" (UID: \"f5aeab99-08c9-480d-bab4-8a94933ae1df\") " pod="service-telemetry/elastic-operator-96d9b59b6-rtjhh" Dec 02 00:19:02 crc kubenswrapper[4856]: I1202 00:19:02.933733 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f5aeab99-08c9-480d-bab4-8a94933ae1df-apiservice-cert\") pod \"elastic-operator-96d9b59b6-rtjhh\" (UID: \"f5aeab99-08c9-480d-bab4-8a94933ae1df\") " pod="service-telemetry/elastic-operator-96d9b59b6-rtjhh" Dec 02 00:19:02 crc kubenswrapper[4856]: I1202 00:19:02.933795 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mcvbt\" (UniqueName: 
\"kubernetes.io/projected/f5aeab99-08c9-480d-bab4-8a94933ae1df-kube-api-access-mcvbt\") pod \"elastic-operator-96d9b59b6-rtjhh\" (UID: \"f5aeab99-08c9-480d-bab4-8a94933ae1df\") " pod="service-telemetry/elastic-operator-96d9b59b6-rtjhh" Dec 02 00:19:03 crc kubenswrapper[4856]: I1202 00:19:03.035172 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mcvbt\" (UniqueName: \"kubernetes.io/projected/f5aeab99-08c9-480d-bab4-8a94933ae1df-kube-api-access-mcvbt\") pod \"elastic-operator-96d9b59b6-rtjhh\" (UID: \"f5aeab99-08c9-480d-bab4-8a94933ae1df\") " pod="service-telemetry/elastic-operator-96d9b59b6-rtjhh" Dec 02 00:19:03 crc kubenswrapper[4856]: I1202 00:19:03.035234 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f5aeab99-08c9-480d-bab4-8a94933ae1df-webhook-cert\") pod \"elastic-operator-96d9b59b6-rtjhh\" (UID: \"f5aeab99-08c9-480d-bab4-8a94933ae1df\") " pod="service-telemetry/elastic-operator-96d9b59b6-rtjhh" Dec 02 00:19:03 crc kubenswrapper[4856]: I1202 00:19:03.035262 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f5aeab99-08c9-480d-bab4-8a94933ae1df-apiservice-cert\") pod \"elastic-operator-96d9b59b6-rtjhh\" (UID: \"f5aeab99-08c9-480d-bab4-8a94933ae1df\") " pod="service-telemetry/elastic-operator-96d9b59b6-rtjhh" Dec 02 00:19:03 crc kubenswrapper[4856]: I1202 00:19:03.041460 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f5aeab99-08c9-480d-bab4-8a94933ae1df-apiservice-cert\") pod \"elastic-operator-96d9b59b6-rtjhh\" (UID: \"f5aeab99-08c9-480d-bab4-8a94933ae1df\") " pod="service-telemetry/elastic-operator-96d9b59b6-rtjhh" Dec 02 00:19:03 crc kubenswrapper[4856]: I1202 00:19:03.044089 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f5aeab99-08c9-480d-bab4-8a94933ae1df-webhook-cert\") pod \"elastic-operator-96d9b59b6-rtjhh\" (UID: \"f5aeab99-08c9-480d-bab4-8a94933ae1df\") " pod="service-telemetry/elastic-operator-96d9b59b6-rtjhh" Dec 02 00:19:03 crc kubenswrapper[4856]: I1202 00:19:03.056202 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mcvbt\" (UniqueName: \"kubernetes.io/projected/f5aeab99-08c9-480d-bab4-8a94933ae1df-kube-api-access-mcvbt\") pod \"elastic-operator-96d9b59b6-rtjhh\" (UID: \"f5aeab99-08c9-480d-bab4-8a94933ae1df\") " pod="service-telemetry/elastic-operator-96d9b59b6-rtjhh" Dec 02 00:19:03 crc kubenswrapper[4856]: I1202 00:19:03.196909 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/elastic-operator-96d9b59b6-rtjhh" Dec 02 00:19:03 crc kubenswrapper[4856]: I1202 00:19:03.589642 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elastic-operator-96d9b59b6-rtjhh"] Dec 02 00:19:03 crc kubenswrapper[4856]: W1202 00:19:03.609740 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf5aeab99_08c9_480d_bab4_8a94933ae1df.slice/crio-52129c275afa6533f72be44ce6c5776fcfc6088f9625d6a8491965f32b969952 WatchSource:0}: Error finding container 52129c275afa6533f72be44ce6c5776fcfc6088f9625d6a8491965f32b969952: Status 404 returned error can't find the container with id 52129c275afa6533f72be44ce6c5776fcfc6088f9625d6a8491965f32b969952 Dec 02 00:19:03 crc kubenswrapper[4856]: I1202 00:19:03.650559 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gqgwn" event={"ID":"06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75","Type":"ContainerStarted","Data":"42a24361de4fbe04ec5ada15784d5e5115905644ee7837d3e440c121849e8368"} Dec 02 00:19:03 crc kubenswrapper[4856]: I1202 00:19:03.657516 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elastic-operator-96d9b59b6-rtjhh" event={"ID":"f5aeab99-08c9-480d-bab4-8a94933ae1df","Type":"ContainerStarted","Data":"52129c275afa6533f72be44ce6c5776fcfc6088f9625d6a8491965f32b969952"} Dec 02 00:19:04 crc kubenswrapper[4856]: I1202 00:19:04.026740 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv" Dec 02 00:19:04 crc kubenswrapper[4856]: I1202 00:19:04.163038 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6765\" (UniqueName: \"kubernetes.io/projected/fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d-kube-api-access-l6765\") pod \"fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d\" (UID: \"fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d\") " Dec 02 00:19:04 crc kubenswrapper[4856]: I1202 00:19:04.163104 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d-util\") pod \"fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d\" (UID: \"fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d\") " Dec 02 00:19:04 crc kubenswrapper[4856]: I1202 00:19:04.163128 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d-bundle\") pod \"fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d\" (UID: \"fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d\") " Dec 02 00:19:04 crc kubenswrapper[4856]: I1202 00:19:04.171780 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d-kube-api-access-l6765" (OuterVolumeSpecName: "kube-api-access-l6765") pod "fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d" (UID: "fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d"). InnerVolumeSpecName "kube-api-access-l6765". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:19:04 crc kubenswrapper[4856]: I1202 00:19:04.172691 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d-bundle" (OuterVolumeSpecName: "bundle") pod "fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d" (UID: "fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:19:04 crc kubenswrapper[4856]: I1202 00:19:04.179827 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d-util" (OuterVolumeSpecName: "util") pod "fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d" (UID: "fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:19:04 crc kubenswrapper[4856]: I1202 00:19:04.264640 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6765\" (UniqueName: \"kubernetes.io/projected/fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d-kube-api-access-l6765\") on node \"crc\" DevicePath \"\"" Dec 02 00:19:04 crc kubenswrapper[4856]: I1202 00:19:04.264751 4856 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d-util\") on node \"crc\" DevicePath \"\"" Dec 02 00:19:04 crc kubenswrapper[4856]: I1202 00:19:04.264816 4856 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 00:19:04 crc kubenswrapper[4856]: I1202 00:19:04.684621 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv" event={"ID":"fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d","Type":"ContainerDied","Data":"1d1e20fba6feefa1314c113cb82c44fd9366b8bab465a3c1247f154a05285cb3"} Dec 02 00:19:04 crc kubenswrapper[4856]: I1202 00:19:04.684708 4856 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1d1e20fba6feefa1314c113cb82c44fd9366b8bab465a3c1247f154a05285cb3" Dec 02 00:19:04 crc kubenswrapper[4856]: I1202 00:19:04.684644 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv" Dec 02 00:19:04 crc kubenswrapper[4856]: I1202 00:19:04.687887 4856 generic.go:334] "Generic (PLEG): container finished" podID="06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75" containerID="42a24361de4fbe04ec5ada15784d5e5115905644ee7837d3e440c121849e8368" exitCode=0 Dec 02 00:19:04 crc kubenswrapper[4856]: I1202 00:19:04.688635 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gqgwn" event={"ID":"06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75","Type":"ContainerDied","Data":"42a24361de4fbe04ec5ada15784d5e5115905644ee7837d3e440c121849e8368"} Dec 02 00:19:05 crc kubenswrapper[4856]: I1202 00:19:05.061848 4856 patch_prober.go:28] interesting pod/machine-config-daemon-455ww container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 00:19:05 crc kubenswrapper[4856]: I1202 00:19:05.064166 4856 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 00:19:06 crc kubenswrapper[4856]: I1202 00:19:06.709240 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gqgwn" event={"ID":"06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75","Type":"ContainerStarted","Data":"75e79f8e896c247c54017fea8896c2e40fe2e423106026288b77fe5a9c0495b0"} Dec 02 00:19:06 crc kubenswrapper[4856]: I1202 00:19:06.735811 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gqgwn" podStartSLOduration=4.643659424 podStartE2EDuration="8.735788698s" podCreationTimestamp="2025-12-02 00:18:58 +0000 UTC" firstStartedPulling="2025-12-02 00:19:01.616877284 +0000 UTC m=+768.643245288" lastFinishedPulling="2025-12-02 00:19:05.709006558 +0000 UTC m=+772.735374562" observedRunningTime="2025-12-02 00:19:06.733711618 +0000 UTC m=+773.760079642" watchObservedRunningTime="2025-12-02 00:19:06.735788698 +0000 UTC m=+773.762156702" Dec 02 00:19:06 crc kubenswrapper[4856]: I1202 00:19:06.857948 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7f76l"] Dec 02 00:19:06 crc kubenswrapper[4856]: I1202 00:19:06.859810 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-7f76l" podUID="2de9e41d-ebac-4023-a405-36dca3999165" containerName="registry-server" containerID="cri-o://972dd81215920644368be3e6963bf61da54e5b71963ddb65115b6929f09e40d2" gracePeriod=2 Dec 02 00:19:07 crc kubenswrapper[4856]: I1202 00:19:07.721715 4856 generic.go:334] "Generic (PLEG): container finished" podID="2de9e41d-ebac-4023-a405-36dca3999165" containerID="972dd81215920644368be3e6963bf61da54e5b71963ddb65115b6929f09e40d2" exitCode=0 Dec 02 00:19:07 crc kubenswrapper[4856]: I1202 00:19:07.722554 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7f76l" event={"ID":"2de9e41d-ebac-4023-a405-36dca3999165","Type":"ContainerDied","Data":"972dd81215920644368be3e6963bf61da54e5b71963ddb65115b6929f09e40d2"} Dec 02 00:19:08 crc kubenswrapper[4856]: I1202 
00:19:08.683667 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gqgwn" Dec 02 00:19:08 crc kubenswrapper[4856]: I1202 00:19:08.684025 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gqgwn" Dec 02 00:19:09 crc kubenswrapper[4856]: I1202 00:19:09.735532 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-gqgwn" podUID="06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75" containerName="registry-server" probeResult="failure" output=< Dec 02 00:19:09 crc kubenswrapper[4856]: timeout: failed to connect service ":50051" within 1s Dec 02 00:19:09 crc kubenswrapper[4856]: > Dec 02 00:19:10 crc kubenswrapper[4856]: I1202 00:19:10.030172 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7f76l" Dec 02 00:19:10 crc kubenswrapper[4856]: I1202 00:19:10.160885 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2mdl4\" (UniqueName: \"kubernetes.io/projected/2de9e41d-ebac-4023-a405-36dca3999165-kube-api-access-2mdl4\") pod \"2de9e41d-ebac-4023-a405-36dca3999165\" (UID: \"2de9e41d-ebac-4023-a405-36dca3999165\") " Dec 02 00:19:10 crc kubenswrapper[4856]: I1202 00:19:10.160946 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2de9e41d-ebac-4023-a405-36dca3999165-catalog-content\") pod \"2de9e41d-ebac-4023-a405-36dca3999165\" (UID: \"2de9e41d-ebac-4023-a405-36dca3999165\") " Dec 02 00:19:10 crc kubenswrapper[4856]: I1202 00:19:10.161067 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2de9e41d-ebac-4023-a405-36dca3999165-utilities\") pod \"2de9e41d-ebac-4023-a405-36dca3999165\" (UID: \"2de9e41d-ebac-4023-a405-36dca3999165\") " Dec 02 00:19:10 crc kubenswrapper[4856]: I1202 00:19:10.162129 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2de9e41d-ebac-4023-a405-36dca3999165-utilities" (OuterVolumeSpecName: "utilities") pod "2de9e41d-ebac-4023-a405-36dca3999165" (UID: "2de9e41d-ebac-4023-a405-36dca3999165"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:19:10 crc kubenswrapper[4856]: I1202 00:19:10.167275 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2de9e41d-ebac-4023-a405-36dca3999165-kube-api-access-2mdl4" (OuterVolumeSpecName: "kube-api-access-2mdl4") pod "2de9e41d-ebac-4023-a405-36dca3999165" (UID: "2de9e41d-ebac-4023-a405-36dca3999165"). InnerVolumeSpecName "kube-api-access-2mdl4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:19:10 crc kubenswrapper[4856]: I1202 00:19:10.218145 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2de9e41d-ebac-4023-a405-36dca3999165-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2de9e41d-ebac-4023-a405-36dca3999165" (UID: "2de9e41d-ebac-4023-a405-36dca3999165"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:19:10 crc kubenswrapper[4856]: I1202 00:19:10.262635 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2mdl4\" (UniqueName: \"kubernetes.io/projected/2de9e41d-ebac-4023-a405-36dca3999165-kube-api-access-2mdl4\") on node \"crc\" DevicePath \"\"" Dec 02 00:19:10 crc kubenswrapper[4856]: I1202 00:19:10.262675 4856 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2de9e41d-ebac-4023-a405-36dca3999165-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 00:19:10 crc kubenswrapper[4856]: I1202 00:19:10.262690 4856 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2de9e41d-ebac-4023-a405-36dca3999165-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 00:19:10 crc kubenswrapper[4856]: I1202 00:19:10.744628 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7f76l" event={"ID":"2de9e41d-ebac-4023-a405-36dca3999165","Type":"ContainerDied","Data":"05d241f827d8cc383515d298f3c1a777ca181d7d067628bce36321f7777dd469"} Dec 02 00:19:10 crc kubenswrapper[4856]: I1202 00:19:10.744674 4856 scope.go:117] "RemoveContainer" containerID="972dd81215920644368be3e6963bf61da54e5b71963ddb65115b6929f09e40d2" Dec 02 00:19:10 crc kubenswrapper[4856]: I1202 00:19:10.744778 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7f76l" Dec 02 00:19:10 crc kubenswrapper[4856]: I1202 00:19:10.787037 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7f76l"] Dec 02 00:19:10 crc kubenswrapper[4856]: I1202 00:19:10.790864 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-7f76l"] Dec 02 00:19:11 crc kubenswrapper[4856]: I1202 00:19:11.263678 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2de9e41d-ebac-4023-a405-36dca3999165" path="/var/lib/kubelet/pods/2de9e41d-ebac-4023-a405-36dca3999165/volumes" Dec 02 00:19:14 crc kubenswrapper[4856]: I1202 00:19:14.254173 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-fl9vf"] Dec 02 00:19:14 crc kubenswrapper[4856]: E1202 00:19:14.254959 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2de9e41d-ebac-4023-a405-36dca3999165" containerName="registry-server" Dec 02 00:19:14 crc kubenswrapper[4856]: I1202 00:19:14.254974 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="2de9e41d-ebac-4023-a405-36dca3999165" containerName="registry-server" Dec 02 00:19:14 crc kubenswrapper[4856]: E1202 00:19:14.254987 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d" containerName="util" Dec 02 00:19:14 crc kubenswrapper[4856]: I1202 00:19:14.254995 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d" containerName="util" Dec 02 00:19:14 crc kubenswrapper[4856]: E1202 00:19:14.255009 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d" containerName="pull" Dec 02 00:19:14 crc kubenswrapper[4856]: I1202 00:19:14.255016 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d" containerName="pull" Dec 02 00:19:14 crc kubenswrapper[4856]: 
E1202 00:19:14.255028 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2de9e41d-ebac-4023-a405-36dca3999165" containerName="extract-utilities" Dec 02 00:19:14 crc kubenswrapper[4856]: I1202 00:19:14.255037 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="2de9e41d-ebac-4023-a405-36dca3999165" containerName="extract-utilities" Dec 02 00:19:14 crc kubenswrapper[4856]: E1202 00:19:14.255055 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d" containerName="extract" Dec 02 00:19:14 crc kubenswrapper[4856]: I1202 00:19:14.255062 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d" containerName="extract" Dec 02 00:19:14 crc kubenswrapper[4856]: E1202 00:19:14.255078 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2de9e41d-ebac-4023-a405-36dca3999165" containerName="extract-content" Dec 02 00:19:14 crc kubenswrapper[4856]: I1202 00:19:14.255085 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="2de9e41d-ebac-4023-a405-36dca3999165" containerName="extract-content" Dec 02 00:19:14 crc kubenswrapper[4856]: I1202 00:19:14.255207 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="2de9e41d-ebac-4023-a405-36dca3999165" containerName="registry-server" Dec 02 00:19:14 crc kubenswrapper[4856]: I1202 00:19:14.255222 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d" containerName="extract" Dec 02 00:19:14 crc kubenswrapper[4856]: I1202 00:19:14.255690 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-fl9vf" Dec 02 00:19:14 crc kubenswrapper[4856]: I1202 00:19:14.257525 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Dec 02 00:19:14 crc kubenswrapper[4856]: I1202 00:19:14.257841 4856 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-558v7" Dec 02 00:19:14 crc kubenswrapper[4856]: I1202 00:19:14.258220 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Dec 02 00:19:14 crc kubenswrapper[4856]: I1202 00:19:14.273061 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-fl9vf"] Dec 02 00:19:14 crc kubenswrapper[4856]: I1202 00:19:14.416335 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/8a6163c6-3649-43f9-830a-0283c9a8d91d-tmp\") pod \"cert-manager-operator-controller-manager-5446d6888b-fl9vf\" (UID: \"8a6163c6-3649-43f9-830a-0283c9a8d91d\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-fl9vf" Dec 02 00:19:14 crc kubenswrapper[4856]: I1202 00:19:14.416472 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84nhh\" (UniqueName: \"kubernetes.io/projected/8a6163c6-3649-43f9-830a-0283c9a8d91d-kube-api-access-84nhh\") pod \"cert-manager-operator-controller-manager-5446d6888b-fl9vf\" (UID: \"8a6163c6-3649-43f9-830a-0283c9a8d91d\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-fl9vf" Dec 02 00:19:14 crc kubenswrapper[4856]: I1202 00:19:14.518044 4856 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84nhh\" (UniqueName: \"kubernetes.io/projected/8a6163c6-3649-43f9-830a-0283c9a8d91d-kube-api-access-84nhh\") pod \"cert-manager-operator-controller-manager-5446d6888b-fl9vf\" (UID: \"8a6163c6-3649-43f9-830a-0283c9a8d91d\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-fl9vf" Dec 02 00:19:14 crc kubenswrapper[4856]: I1202 00:19:14.518125 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/8a6163c6-3649-43f9-830a-0283c9a8d91d-tmp\") pod \"cert-manager-operator-controller-manager-5446d6888b-fl9vf\" (UID: \"8a6163c6-3649-43f9-830a-0283c9a8d91d\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-fl9vf" Dec 02 00:19:14 crc kubenswrapper[4856]: I1202 00:19:14.518657 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/8a6163c6-3649-43f9-830a-0283c9a8d91d-tmp\") pod \"cert-manager-operator-controller-manager-5446d6888b-fl9vf\" (UID: \"8a6163c6-3649-43f9-830a-0283c9a8d91d\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-fl9vf" Dec 02 00:19:14 crc kubenswrapper[4856]: I1202 00:19:14.539734 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84nhh\" (UniqueName: \"kubernetes.io/projected/8a6163c6-3649-43f9-830a-0283c9a8d91d-kube-api-access-84nhh\") pod \"cert-manager-operator-controller-manager-5446d6888b-fl9vf\" (UID: \"8a6163c6-3649-43f9-830a-0283c9a8d91d\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-fl9vf" Dec 02 00:19:14 crc kubenswrapper[4856]: I1202 00:19:14.568480 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-fl9vf" Dec 02 00:19:18 crc kubenswrapper[4856]: I1202 00:19:18.722479 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gqgwn" Dec 02 00:19:18 crc kubenswrapper[4856]: I1202 00:19:18.764664 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gqgwn" Dec 02 00:19:19 crc kubenswrapper[4856]: E1202 00:19:19.893673 4856 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="registry.connect.redhat.com/elastic/eck-operator@sha256:28925fffef8f7c920b2510810cbcfc0f3dadab5f8a80b01fd5ae500e5c070105" Dec 02 00:19:19 crc kubenswrapper[4856]: E1202 00:19:19.893854 4856 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:registry.connect.redhat.com/elastic/eck-operator@sha256:28925fffef8f7c920b2510810cbcfc0f3dadab5f8a80b01fd5ae500e5c070105,Command:[],Args:[manager --config=/conf/eck.yaml --manage-webhook-certs=false --enable-webhook --ubi-only --distribution-channel=certified-operators],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https-webhook,HostPort:0,ContainerPort:9443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:NAMESPACES,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.annotations['olm.targetNamespaces'],},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.annotations['olm.operatorNamespace'],},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:OPERATOR_IMAGE,Value:registry.connect.redhat.com/elastic/eck-operator@sha256:28925fffef8f7c920b2510810cbcfc0f3dadab5f8a80b01fd5ae500e5c070105,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:elasticsearch-eck-operator-certified.v3.2.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{1 0} {} 1 DecimalSI},memory: {{1073741824 0} {} 1Gi BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{157286400 0} {} 150Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mcvbt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
elastic-operator-96d9b59b6-rtjhh_service-telemetry(f5aeab99-08c9-480d-bab4-8a94933ae1df): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 00:19:19 crc kubenswrapper[4856]: E1202 00:19:19.895047 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/elastic-operator-96d9b59b6-rtjhh" podUID="f5aeab99-08c9-480d-bab4-8a94933ae1df" Dec 02 00:19:20 crc kubenswrapper[4856]: E1202 00:19:20.804412 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck-operator@sha256:28925fffef8f7c920b2510810cbcfc0f3dadab5f8a80b01fd5ae500e5c070105\\\"\"" pod="service-telemetry/elastic-operator-96d9b59b6-rtjhh" podUID="f5aeab99-08c9-480d-bab4-8a94933ae1df" Dec 02 00:19:20 crc kubenswrapper[4856]: E1202 00:19:20.844491 4856 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3" Dec 02 00:19:20 crc kubenswrapper[4856]: E1202 00:19:20.844713 4856 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3,Command:[],Args:[--prometheus-config-reloader=$(RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER) --prometheus-instance-selector=app.kubernetes.io/managed-by=observability-operator --alertmanager-instance-selector=app.kubernetes.io/managed-by=observability-operator --thanos-ruler-instance-selector=app.kubernetes.io/managed-by=observability-operator],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:GOGC,Value:30,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER,Value:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:1133c973c7472c665f910a722e19c8e2e27accb34b90fab67f14548627ce9c62,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{157286400 0} {} 150Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-v9h6l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-668cf9dfbb-j272l_openshift-operators(c56dba5d-a93c-45fb-8495-846b9098c7d1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 00:19:20 crc kubenswrapper[4856]: E1202 00:19:20.845906 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j272l" podUID="c56dba5d-a93c-45fb-8495-846b9098c7d1" Dec 02 00:19:21 crc kubenswrapper[4856]: E1202 00:19:21.808461 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3\\\"\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j272l" podUID="c56dba5d-a93c-45fb-8495-846b9098c7d1" Dec 02 00:19:22 crc kubenswrapper[4856]: I1202 00:19:22.446388 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gqgwn"] Dec 02 00:19:22 crc kubenswrapper[4856]: I1202 00:19:22.446644 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gqgwn" podUID="06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75" containerName="registry-server" containerID="cri-o://75e79f8e896c247c54017fea8896c2e40fe2e423106026288b77fe5a9c0495b0" gracePeriod=2 Dec 02 00:19:22 crc kubenswrapper[4856]: I1202 00:19:22.815324 4856 generic.go:334] "Generic (PLEG): container finished" podID="06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75" containerID="75e79f8e896c247c54017fea8896c2e40fe2e423106026288b77fe5a9c0495b0" exitCode=0 Dec 02 00:19:22 crc kubenswrapper[4856]: I1202 00:19:22.815393 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gqgwn" event={"ID":"06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75","Type":"ContainerDied","Data":"75e79f8e896c247c54017fea8896c2e40fe2e423106026288b77fe5a9c0495b0"} Dec 02 00:19:23 crc kubenswrapper[4856]: E1202 00:19:23.014276 4856 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" 
image="registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb" Dec 02 00:19:23 crc kubenswrapper[4856]: E1202 00:19:23.014516 4856 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb,Command:[],Args:[--namespace=$(NAMESPACE) --images=perses=$(RELATED_IMAGE_PERSES) --images=alertmanager=$(RELATED_IMAGE_ALERTMANAGER) --images=prometheus=$(RELATED_IMAGE_PROMETHEUS) --images=thanos=$(RELATED_IMAGE_THANOS) --images=ui-dashboards=$(RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN) --images=ui-distributed-tracing=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN) --images=ui-distributed-tracing-pf5=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5) --images=ui-distributed-tracing-pf4=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4) --images=ui-logging=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN) --images=ui-logging-pf4=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4) --images=ui-troubleshooting-panel=$(RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN) --images=ui-monitoring=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN) --images=ui-monitoring-pf5=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5) --images=korrel8r=$(RELATED_IMAGE_KORREL8R) --images=health-analyzer=$(RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER) --openshift.enabled=true],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:RELATED_IMAGE_ALERTMANAGER,Value:registry.redhat.io/cluster-observability-operator/alertmanager-rhel9@sha256:e718854a7d6ca8accf0fa72db0eb902e46c44d747ad51dc3f06bba0cefaa3c01,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS,Value:registry.redhat.io/cluster-observability-operator/prometheus-rhel9@sha256:17ea20be390a94ab39f5cdd7f0cbc2498046eebcf77fe3dec9aa288d5c2cf46b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_THANOS,Value:registry.redhat.io/cluster-observability-operator/thanos-rhel9@sha256:d972f4faa5e9c121402d23ed85002f26af48ec36b1b71a7489d677b3913d08b4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PERSES,Value:registry.redhat.io/cluster-observability-operator/perses-rhel9@sha256:91531137fc1dcd740e277e0f65e120a0176a16f788c14c27925b61aa0b792ade,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/dashboards-console-plugin-rhel9@sha256:a69da8bbca8a28dd2925f864d51cc31cf761b10532c553095ba40b242ef701cb,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-rhel9@sha256:897e1bfad1187062725b54d87107bd0155972257a50d8335dd29e1999b828a4f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf5-rhel9@sha256:95fe5b5746ca8c07ac9217ce2d8ac8e6afad17af210f9d8e0074df1310b209a8,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf4-rhel9@sha256:e9d9a89e4d8126a62b1852055482258ee528cac6398dd5d43ebad75ace0f33c9,ValueFrom:nil,},EnvVar{Name:RELATED
_IMAGE_CONSOLE_LOGGING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-rhel9@sha256:ec684a0645ceb917b019af7ddba68c3533416e356ab0d0320a30e75ca7ebb31b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-pf4-rhel9@sha256:3b9693fcde9b3a9494fb04735b1f7cfd0426f10be820fdc3f024175c0d3df1c9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/troubleshooting-panel-console-plugin-rhel9@sha256:580606f194180accc8abba099e17a26dca7522ec6d233fa2fdd40312771703e3,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-rhel9@sha256:e03777be39e71701935059cd877603874a13ac94daa73219d4e5e545599d78a9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-pf5-rhel9@sha256:aa47256193cfd2877853878e1ae97d2ab8b8e5deae62b387cbfad02b284d379c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KORREL8R,Value:registry.redhat.io/cluster-observability-operator/korrel8r-rhel9@sha256:c595ff56b2cb85514bf4784db6ddb82e4e657e3e708a7fb695fc4997379a94d4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER,Value:registry.redhat.io/cluster-observability-operator/cluster-health-analyzer-rhel9@sha256:45a4ec2a519bcec99e886aa91596d5356a2414a2bd103baaef9fa7838c672eb2,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{400 -3} {} 400m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:observability-operator-tls,ReadOnly:true,MountPath:/etc/tls/private,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7hjkl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
observability-operator-d8bb48f5d-2tlhq_openshift-operators(35c7073d-b173-4200-a0fe-5df05f791e68): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 00:19:23 crc kubenswrapper[4856]: E1202 00:19:23.015696 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/observability-operator-d8bb48f5d-2tlhq" podUID="35c7073d-b173-4200-a0fe-5df05f791e68" Dec 02 00:19:23 crc kubenswrapper[4856]: E1202 00:19:23.389866 4856 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec" Dec 02 00:19:23 crc kubenswrapper[4856]: E1202 00:19:23.390046 4856 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt --web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-admission-webhook-f5c4ddff-6hkks_openshift-operators(edfb110d-4069-42b8-a8bf-3cf5a74ba610): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 00:19:23 crc kubenswrapper[4856]: E1202 00:19:23.391436 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: 
\"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-6hkks" podUID="edfb110d-4069-42b8-a8bf-3cf5a74ba610" Dec 02 00:19:23 crc kubenswrapper[4856]: E1202 00:19:23.401496 4856 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec" Dec 02 00:19:23 crc kubenswrapper[4856]: E1202 00:19:23.401769 4856 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt --web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-admission-webhook-f5c4ddff-v5dqs_openshift-operators(6906a6f6-08a2-4a23-b609-4b3f37976695): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 00:19:23 crc kubenswrapper[4856]: E1202 00:19:23.403006 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-v5dqs" podUID="6906a6f6-08a2-4a23-b609-4b3f37976695" Dec 02 00:19:23 crc kubenswrapper[4856]: E1202 00:19:23.833384 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"prometheus-operator-admission-webhook\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec\\\"\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-6hkks" podUID="edfb110d-4069-42b8-a8bf-3cf5a74ba610" Dec 02 00:19:23 crc kubenswrapper[4856]: E1202 00:19:23.833409 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb\\\"\"" pod="openshift-operators/observability-operator-d8bb48f5d-2tlhq" podUID="35c7073d-b173-4200-a0fe-5df05f791e68" Dec 02 00:19:23 crc kubenswrapper[4856]: E1202 00:19:23.833478 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec\\\"\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-v5dqs" podUID="6906a6f6-08a2-4a23-b609-4b3f37976695" Dec 02 00:19:24 crc kubenswrapper[4856]: I1202 00:19:24.142941 4856 scope.go:117] "RemoveContainer" containerID="94491e7222cc33572c7f3f1382d72b031c41da6295698d07d23e1e450bffa493" Dec 02 00:19:24 crc kubenswrapper[4856]: E1202 00:19:24.171990 4856 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:9aec4c328ec43e40481e06ca5808deead74b75c0aacb90e9e72966c3fa14f385" Dec 02 00:19:24 crc kubenswrapper[4856]: E1202 00:19:24.172276 4856 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:perses-operator,Image:registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:9aec4c328ec43e40481e06ca5808deead74b75c0aacb90e9e72966c3fa14f385,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{134217728 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openshift-service-ca,ReadOnly:true,MountPath:/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5nzgf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod perses-operator-5446b9c989-wz88m_openshift-operators(909dd6ee-af48-452f-8de5-73fe740e006b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 00:19:24 crc kubenswrapper[4856]: E1202 00:19:24.173639 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"perses-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/perses-operator-5446b9c989-wz88m" podUID="909dd6ee-af48-452f-8de5-73fe740e006b" Dec 02 00:19:24 crc kubenswrapper[4856]: I1202 00:19:24.179086 4856 scope.go:117] "RemoveContainer" containerID="fed0f4c869cf934d974d79a1b4d93661c1612daae833c4072e404b8929671ef7" Dec 02 00:19:24 crc kubenswrapper[4856]: I1202 00:19:24.421555 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-fl9vf"] Dec 02 00:19:24 crc kubenswrapper[4856]: I1202 00:19:24.566446 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gqgwn" Dec 02 00:19:24 crc kubenswrapper[4856]: I1202 00:19:24.600354 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wdmxc\" (UniqueName: \"kubernetes.io/projected/06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75-kube-api-access-wdmxc\") pod \"06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75\" (UID: \"06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75\") " Dec 02 00:19:24 crc kubenswrapper[4856]: I1202 00:19:24.600422 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75-utilities\") pod \"06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75\" (UID: \"06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75\") " Dec 02 00:19:24 crc kubenswrapper[4856]: I1202 00:19:24.600697 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75-catalog-content\") pod \"06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75\" (UID: \"06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75\") " Dec 02 00:19:24 crc kubenswrapper[4856]: I1202 00:19:24.601392 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75-utilities" (OuterVolumeSpecName: "utilities") pod "06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75" (UID: "06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:19:24 crc kubenswrapper[4856]: I1202 00:19:24.611682 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75-kube-api-access-wdmxc" (OuterVolumeSpecName: "kube-api-access-wdmxc") pod "06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75" (UID: "06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75"). InnerVolumeSpecName "kube-api-access-wdmxc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:19:24 crc kubenswrapper[4856]: I1202 00:19:24.702315 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wdmxc\" (UniqueName: \"kubernetes.io/projected/06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75-kube-api-access-wdmxc\") on node \"crc\" DevicePath \"\"" Dec 02 00:19:24 crc kubenswrapper[4856]: I1202 00:19:24.702345 4856 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 00:19:24 crc kubenswrapper[4856]: I1202 00:19:24.712871 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75" (UID: "06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:19:24 crc kubenswrapper[4856]: I1202 00:19:24.803978 4856 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 00:19:24 crc kubenswrapper[4856]: I1202 00:19:24.836703 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-fl9vf" event={"ID":"8a6163c6-3649-43f9-830a-0283c9a8d91d","Type":"ContainerStarted","Data":"f7ddb60c6a76c95f2e4bd9ace97cd822794c45ed2fce7d4a29a056b815308153"} Dec 02 00:19:24 crc kubenswrapper[4856]: I1202 00:19:24.839145 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gqgwn" event={"ID":"06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75","Type":"ContainerDied","Data":"46482f3ddeebe8b992ed6c278095c17e47b15f1ffa05eb35a10a7dae38efb3a5"} Dec 02 00:19:24 crc kubenswrapper[4856]: I1202 00:19:24.839186 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gqgwn" Dec 02 00:19:24 crc kubenswrapper[4856]: I1202 00:19:24.839219 4856 scope.go:117] "RemoveContainer" containerID="75e79f8e896c247c54017fea8896c2e40fe2e423106026288b77fe5a9c0495b0" Dec 02 00:19:24 crc kubenswrapper[4856]: E1202 00:19:24.841652 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"perses-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:9aec4c328ec43e40481e06ca5808deead74b75c0aacb90e9e72966c3fa14f385\\\"\"" pod="openshift-operators/perses-operator-5446b9c989-wz88m" podUID="909dd6ee-af48-452f-8de5-73fe740e006b" Dec 02 00:19:24 crc kubenswrapper[4856]: I1202 00:19:24.862433 4856 scope.go:117] "RemoveContainer" containerID="42a24361de4fbe04ec5ada15784d5e5115905644ee7837d3e440c121849e8368" Dec 02 00:19:24 crc kubenswrapper[4856]: I1202 00:19:24.873315 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gqgwn"] Dec 02 00:19:24 crc kubenswrapper[4856]: I1202 00:19:24.876128 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gqgwn"] Dec 02 00:19:24 crc kubenswrapper[4856]: I1202 00:19:24.892221 4856 scope.go:117] "RemoveContainer" containerID="3531b36acf35173f87de36997848d41225864dfbc2d592eda0b1923afe704d97" Dec 02 00:19:25 crc kubenswrapper[4856]: I1202 00:19:25.258631 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75" path="/var/lib/kubelet/pods/06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75/volumes" Dec 02 00:19:27 crc kubenswrapper[4856]: I1202 00:19:27.866091 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-fl9vf" event={"ID":"8a6163c6-3649-43f9-830a-0283c9a8d91d","Type":"ContainerStarted","Data":"b90481274be3a89ed07efa8c0900dc497751be44944f312b99aaa21f7fea1dd2"} Dec 02 00:19:27 crc kubenswrapper[4856]: I1202 00:19:27.906762 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-fl9vf" podStartSLOduration=10.875028396 podStartE2EDuration="13.906744638s" podCreationTimestamp="2025-12-02 00:19:14 +0000 UTC" firstStartedPulling="2025-12-02 00:19:24.437425013 +0000 UTC m=+791.463793017" lastFinishedPulling="2025-12-02 00:19:27.469141265 +0000 UTC m=+794.495509259" observedRunningTime="2025-12-02 00:19:27.903896159 +0000 UTC m=+794.930264193" watchObservedRunningTime="2025-12-02 00:19:27.906744638 +0000 UTC m=+794.933112642" Dec 02 00:19:31 crc kubenswrapper[4856]: I1202 00:19:31.485746 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-mpnvc"] Dec 02 00:19:31 crc kubenswrapper[4856]: E1202 00:19:31.486380 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75" containerName="extract-content" Dec 02 00:19:31 crc kubenswrapper[4856]: I1202 00:19:31.486391 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75" containerName="extract-content" Dec 02 00:19:31 crc kubenswrapper[4856]: E1202 00:19:31.486414 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75" containerName="registry-server" Dec 02 00:19:31 crc kubenswrapper[4856]: I1202 00:19:31.486422 4856 
state_mem.go:107] "Deleted CPUSet assignment" podUID="06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75" containerName="registry-server" Dec 02 00:19:31 crc kubenswrapper[4856]: E1202 00:19:31.486431 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75" containerName="extract-utilities" Dec 02 00:19:31 crc kubenswrapper[4856]: I1202 00:19:31.486439 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75" containerName="extract-utilities" Dec 02 00:19:31 crc kubenswrapper[4856]: I1202 00:19:31.486534 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="06adfe3c-90b0-4f00-9f2d-a9af5b0c5e75" containerName="registry-server" Dec 02 00:19:31 crc kubenswrapper[4856]: I1202 00:19:31.486916 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-mpnvc" Dec 02 00:19:31 crc kubenswrapper[4856]: I1202 00:19:31.489000 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 02 00:19:31 crc kubenswrapper[4856]: I1202 00:19:31.489349 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 02 00:19:31 crc kubenswrapper[4856]: I1202 00:19:31.489530 4856 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-zzlkn" Dec 02 00:19:31 crc kubenswrapper[4856]: I1202 00:19:31.504909 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-mpnvc"] Dec 02 00:19:31 crc kubenswrapper[4856]: I1202 00:19:31.590440 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c5f791d3-d7d5-4523-a510-1a73220082dd-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-mpnvc\" (UID: \"c5f791d3-d7d5-4523-a510-1a73220082dd\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-mpnvc" Dec 02 00:19:31 crc kubenswrapper[4856]: I1202 00:19:31.590575 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4k9ht\" (UniqueName: \"kubernetes.io/projected/c5f791d3-d7d5-4523-a510-1a73220082dd-kube-api-access-4k9ht\") pod \"cert-manager-webhook-f4fb5df64-mpnvc\" (UID: \"c5f791d3-d7d5-4523-a510-1a73220082dd\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-mpnvc" Dec 02 00:19:31 crc kubenswrapper[4856]: I1202 00:19:31.692072 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4k9ht\" (UniqueName: \"kubernetes.io/projected/c5f791d3-d7d5-4523-a510-1a73220082dd-kube-api-access-4k9ht\") pod \"cert-manager-webhook-f4fb5df64-mpnvc\" (UID: \"c5f791d3-d7d5-4523-a510-1a73220082dd\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-mpnvc" Dec 02 00:19:31 crc kubenswrapper[4856]: I1202 00:19:31.692154 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c5f791d3-d7d5-4523-a510-1a73220082dd-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-mpnvc\" (UID: \"c5f791d3-d7d5-4523-a510-1a73220082dd\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-mpnvc" Dec 02 00:19:31 crc kubenswrapper[4856]: I1202 00:19:31.709833 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/c5f791d3-d7d5-4523-a510-1a73220082dd-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-mpnvc\" (UID: \"c5f791d3-d7d5-4523-a510-1a73220082dd\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-mpnvc" Dec 02 00:19:31 crc kubenswrapper[4856]: I1202 00:19:31.710478 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4k9ht\" (UniqueName: \"kubernetes.io/projected/c5f791d3-d7d5-4523-a510-1a73220082dd-kube-api-access-4k9ht\") pod \"cert-manager-webhook-f4fb5df64-mpnvc\" (UID: \"c5f791d3-d7d5-4523-a510-1a73220082dd\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-mpnvc" Dec 02 00:19:31 crc kubenswrapper[4856]: I1202 00:19:31.804417 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-mpnvc" Dec 02 00:19:32 crc kubenswrapper[4856]: I1202 00:19:32.212541 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-mpnvc"] Dec 02 00:19:32 crc kubenswrapper[4856]: W1202 00:19:32.216496 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc5f791d3_d7d5_4523_a510_1a73220082dd.slice/crio-4554736187c943d3c7e3b6fff36d2084126d9211d8a4a9bc4cfa4383134d1e76 WatchSource:0}: Error finding container 4554736187c943d3c7e3b6fff36d2084126d9211d8a4a9bc4cfa4383134d1e76: Status 404 returned error can't find the container with id 4554736187c943d3c7e3b6fff36d2084126d9211d8a4a9bc4cfa4383134d1e76 Dec 02 00:19:32 crc kubenswrapper[4856]: I1202 00:19:32.426271 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-gxmkb"] Dec 02 00:19:32 crc kubenswrapper[4856]: I1202 00:19:32.427397 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-gxmkb" Dec 02 00:19:32 crc kubenswrapper[4856]: I1202 00:19:32.433893 4856 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-sqth4" Dec 02 00:19:32 crc kubenswrapper[4856]: I1202 00:19:32.453993 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-gxmkb"] Dec 02 00:19:32 crc kubenswrapper[4856]: I1202 00:19:32.500791 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hck4q\" (UniqueName: \"kubernetes.io/projected/d75f8753-c27e-49df-8086-66f21b91d98a-kube-api-access-hck4q\") pod \"cert-manager-cainjector-855d9ccff4-gxmkb\" (UID: \"d75f8753-c27e-49df-8086-66f21b91d98a\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-gxmkb" Dec 02 00:19:32 crc kubenswrapper[4856]: I1202 00:19:32.500847 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d75f8753-c27e-49df-8086-66f21b91d98a-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-gxmkb\" (UID: \"d75f8753-c27e-49df-8086-66f21b91d98a\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-gxmkb" Dec 02 00:19:32 crc kubenswrapper[4856]: I1202 00:19:32.601869 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hck4q\" (UniqueName: \"kubernetes.io/projected/d75f8753-c27e-49df-8086-66f21b91d98a-kube-api-access-hck4q\") pod \"cert-manager-cainjector-855d9ccff4-gxmkb\" (UID: \"d75f8753-c27e-49df-8086-66f21b91d98a\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-gxmkb" Dec 02 00:19:32 crc kubenswrapper[4856]: I1202 00:19:32.601934 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d75f8753-c27e-49df-8086-66f21b91d98a-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-gxmkb\" (UID: \"d75f8753-c27e-49df-8086-66f21b91d98a\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-gxmkb" Dec 02 00:19:32 crc kubenswrapper[4856]: I1202 00:19:32.624264 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d75f8753-c27e-49df-8086-66f21b91d98a-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-gxmkb\" (UID: \"d75f8753-c27e-49df-8086-66f21b91d98a\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-gxmkb" Dec 02 00:19:32 crc kubenswrapper[4856]: I1202 00:19:32.625736 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hck4q\" (UniqueName: \"kubernetes.io/projected/d75f8753-c27e-49df-8086-66f21b91d98a-kube-api-access-hck4q\") pod \"cert-manager-cainjector-855d9ccff4-gxmkb\" (UID: \"d75f8753-c27e-49df-8086-66f21b91d98a\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-gxmkb" Dec 02 00:19:32 crc kubenswrapper[4856]: I1202 00:19:32.742050 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-gxmkb" Dec 02 00:19:32 crc kubenswrapper[4856]: I1202 00:19:32.893647 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-mpnvc" event={"ID":"c5f791d3-d7d5-4523-a510-1a73220082dd","Type":"ContainerStarted","Data":"4554736187c943d3c7e3b6fff36d2084126d9211d8a4a9bc4cfa4383134d1e76"} Dec 02 00:19:33 crc kubenswrapper[4856]: I1202 00:19:33.012842 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-gxmkb"] Dec 02 00:19:33 crc kubenswrapper[4856]: I1202 00:19:33.900006 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-gxmkb" event={"ID":"d75f8753-c27e-49df-8086-66f21b91d98a","Type":"ContainerStarted","Data":"383f792051d4f4288f06c726551f5132afa8f9af808a014826b987eef391d70a"} Dec 02 00:19:35 crc kubenswrapper[4856]: I1202 00:19:35.075294 4856 patch_prober.go:28] interesting pod/machine-config-daemon-455ww container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 00:19:35 crc kubenswrapper[4856]: I1202 00:19:35.075341 4856 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 00:19:35 crc kubenswrapper[4856]: I1202 00:19:35.075378 4856 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-455ww" Dec 02 00:19:35 crc kubenswrapper[4856]: I1202 00:19:35.075867 4856 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bc5c4932251b6c6119e7f7784c57be6ad9b00828d16d6b292535c8fefb264f0e"} pod="openshift-machine-config-operator/machine-config-daemon-455ww" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 00:19:35 crc kubenswrapper[4856]: I1202 00:19:35.075908 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" containerID="cri-o://bc5c4932251b6c6119e7f7784c57be6ad9b00828d16d6b292535c8fefb264f0e" gracePeriod=600 Dec 02 00:19:35 crc kubenswrapper[4856]: I1202 00:19:35.913163 4856 generic.go:334] "Generic (PLEG): container finished" podID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerID="bc5c4932251b6c6119e7f7784c57be6ad9b00828d16d6b292535c8fefb264f0e" exitCode=0 Dec 02 00:19:35 crc kubenswrapper[4856]: I1202 00:19:35.913364 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" event={"ID":"0271f00d-b420-4dee-aa8b-92d6fc294b2a","Type":"ContainerDied","Data":"bc5c4932251b6c6119e7f7784c57be6ad9b00828d16d6b292535c8fefb264f0e"} Dec 02 00:19:35 crc kubenswrapper[4856]: I1202 00:19:35.913401 4856 scope.go:117] "RemoveContainer" containerID="1a22a1073572a0b7416ea74ea0de4e4adcb24242e1feb1c293c982dbafd00b1f" Dec 02 00:19:39 crc kubenswrapper[4856]: I1202 00:19:39.955800 4856 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" event={"ID":"0271f00d-b420-4dee-aa8b-92d6fc294b2a","Type":"ContainerStarted","Data":"e7e6284ee922e4a2a16a6b87da7e0c59263014f87eb5443234c0a212cc45aca5"} Dec 02 00:19:40 crc kubenswrapper[4856]: I1202 00:19:40.962297 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-v5dqs" event={"ID":"6906a6f6-08a2-4a23-b609-4b3f37976695","Type":"ContainerStarted","Data":"65a0f8362d71598bd5b0781a4c349206a5eef74bab1b58b8d4e6571ea6f2cb2d"} Dec 02 00:19:40 crc kubenswrapper[4856]: I1202 00:19:40.963445 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-gxmkb" event={"ID":"d75f8753-c27e-49df-8086-66f21b91d98a","Type":"ContainerStarted","Data":"44af2471229cc834c541c4fbe935276e19f415cf1ccca16be77a72645ea96326"} Dec 02 00:19:40 crc kubenswrapper[4856]: I1202 00:19:40.964895 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elastic-operator-96d9b59b6-rtjhh" event={"ID":"f5aeab99-08c9-480d-bab4-8a94933ae1df","Type":"ContainerStarted","Data":"bb92ab4f6b0e18217ce5dd77e9d5967ada88245347413b1c3e6031ca4a69954b"} Dec 02 00:19:40 crc kubenswrapper[4856]: I1202 00:19:40.966163 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-6hkks" event={"ID":"edfb110d-4069-42b8-a8bf-3cf5a74ba610","Type":"ContainerStarted","Data":"e31d061de7f10f80afe2410ad6598eeb7691f836737b3b35f95f3939887d959d"} Dec 02 00:19:40 crc kubenswrapper[4856]: I1202 00:19:40.967620 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-mpnvc" event={"ID":"c5f791d3-d7d5-4523-a510-1a73220082dd","Type":"ContainerStarted","Data":"fe2b66edf861733c1bae78d4abe538f49b85ca690567bf11996764656808d231"} Dec 02 00:19:40 crc kubenswrapper[4856]: I1202 00:19:40.967762 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-mpnvc" Dec 02 00:19:40 crc kubenswrapper[4856]: I1202 00:19:40.969133 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j272l" event={"ID":"c56dba5d-a93c-45fb-8495-846b9098c7d1","Type":"ContainerStarted","Data":"b61ce30b810058db996786671aad4ed02ddd2aa9ce28141e5875ea41dd65f128"} Dec 02 00:19:40 crc kubenswrapper[4856]: I1202 00:19:40.970377 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-2tlhq" event={"ID":"35c7073d-b173-4200-a0fe-5df05f791e68","Type":"ContainerStarted","Data":"ba9559512b9b2b88b146084fe06ae3b9bdbf2adba33b62f1e1de8b6a1b531820"} Dec 02 00:19:40 crc kubenswrapper[4856]: I1202 00:19:40.970703 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-2tlhq" Dec 02 00:19:40 crc kubenswrapper[4856]: I1202 00:19:40.972136 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-wz88m" event={"ID":"909dd6ee-af48-452f-8de5-73fe740e006b","Type":"ContainerStarted","Data":"93e739e236b32d930270bb4b3dffbb0ef80a14ad8b59cf332e0ca16718a02e7c"} Dec 02 00:19:40 crc kubenswrapper[4856]: I1202 00:19:40.972365 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-wz88m" Dec 02 00:19:40 
crc kubenswrapper[4856]: I1202 00:19:40.972679 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-d8bb48f5d-2tlhq" Dec 02 00:19:40 crc kubenswrapper[4856]: I1202 00:19:40.984673 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-v5dqs" podStartSLOduration=4.295594041 podStartE2EDuration="42.984661022s" podCreationTimestamp="2025-12-02 00:18:58 +0000 UTC" firstStartedPulling="2025-12-02 00:19:01.130807628 +0000 UTC m=+768.157175632" lastFinishedPulling="2025-12-02 00:19:39.819874609 +0000 UTC m=+806.846242613" observedRunningTime="2025-12-02 00:19:40.983661208 +0000 UTC m=+808.010029212" watchObservedRunningTime="2025-12-02 00:19:40.984661022 +0000 UTC m=+808.011029056" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.000262 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j272l" podStartSLOduration=4.236450661 podStartE2EDuration="43.00024098s" podCreationTimestamp="2025-12-02 00:18:58 +0000 UTC" firstStartedPulling="2025-12-02 00:19:01.046744093 +0000 UTC m=+768.073112097" lastFinishedPulling="2025-12-02 00:19:39.810534412 +0000 UTC m=+806.836902416" observedRunningTime="2025-12-02 00:19:40.998457617 +0000 UTC m=+808.024825621" watchObservedRunningTime="2025-12-02 00:19:41.00024098 +0000 UTC m=+808.026608984" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.026667 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-wz88m" podStartSLOduration=4.222265954 podStartE2EDuration="43.026646381s" podCreationTimestamp="2025-12-02 00:18:58 +0000 UTC" firstStartedPulling="2025-12-02 00:19:00.993020816 +0000 UTC m=+768.019388820" lastFinishedPulling="2025-12-02 00:19:39.797401243 +0000 UTC m=+806.823769247" observedRunningTime="2025-12-02 00:19:41.020803259 +0000 UTC m=+808.047171283" watchObservedRunningTime="2025-12-02 00:19:41.026646381 +0000 UTC m=+808.053014395" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.038706 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-mpnvc" podStartSLOduration=2.365903435 podStartE2EDuration="10.038690304s" podCreationTimestamp="2025-12-02 00:19:31 +0000 UTC" firstStartedPulling="2025-12-02 00:19:32.218318318 +0000 UTC m=+799.244686322" lastFinishedPulling="2025-12-02 00:19:39.891105187 +0000 UTC m=+806.917473191" observedRunningTime="2025-12-02 00:19:41.036582962 +0000 UTC m=+808.062950976" watchObservedRunningTime="2025-12-02 00:19:41.038690304 +0000 UTC m=+808.065058318" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.089223 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-gxmkb" podStartSLOduration=2.318914736 podStartE2EDuration="9.0891995s" podCreationTimestamp="2025-12-02 00:19:32 +0000 UTC" firstStartedPulling="2025-12-02 00:19:33.027101479 +0000 UTC m=+800.053469483" lastFinishedPulling="2025-12-02 00:19:39.797386243 +0000 UTC m=+806.823754247" observedRunningTime="2025-12-02 00:19:41.05953829 +0000 UTC m=+808.085906304" watchObservedRunningTime="2025-12-02 00:19:41.0891995 +0000 UTC m=+808.115567504" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.099468 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-operators/observability-operator-d8bb48f5d-2tlhq" podStartSLOduration=4.3290051179999995 podStartE2EDuration="43.099446718s" podCreationTimestamp="2025-12-02 00:18:58 +0000 UTC" firstStartedPulling="2025-12-02 00:19:01.047054051 +0000 UTC m=+768.073422055" lastFinishedPulling="2025-12-02 00:19:39.817495651 +0000 UTC m=+806.843863655" observedRunningTime="2025-12-02 00:19:41.097988643 +0000 UTC m=+808.124356657" watchObservedRunningTime="2025-12-02 00:19:41.099446718 +0000 UTC m=+808.125814712" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.150294 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-f5c4ddff-6hkks" podStartSLOduration=4.557167516 podStartE2EDuration="43.150279402s" podCreationTimestamp="2025-12-02 00:18:58 +0000 UTC" firstStartedPulling="2025-12-02 00:19:01.224709293 +0000 UTC m=+768.251077297" lastFinishedPulling="2025-12-02 00:19:39.817821169 +0000 UTC m=+806.844189183" observedRunningTime="2025-12-02 00:19:41.14977863 +0000 UTC m=+808.176146634" watchObservedRunningTime="2025-12-02 00:19:41.150279402 +0000 UTC m=+808.176647406" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.151469 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/elastic-operator-96d9b59b6-rtjhh" podStartSLOduration=2.954625373 podStartE2EDuration="39.151460851s" podCreationTimestamp="2025-12-02 00:19:02 +0000 UTC" firstStartedPulling="2025-12-02 00:19:03.620471798 +0000 UTC m=+770.646839802" lastFinishedPulling="2025-12-02 00:19:39.817307266 +0000 UTC m=+806.843675280" observedRunningTime="2025-12-02 00:19:41.129872837 +0000 UTC m=+808.156240841" watchObservedRunningTime="2025-12-02 00:19:41.151460851 +0000 UTC m=+808.177828855" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.451427 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.452828 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.457121 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-dockercfg-46j8q" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.462387 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-internal-users" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.462849 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-http-certs-internal" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.463009 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-remote-ca" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.463142 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"elasticsearch-es-unicast-hosts" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.463298 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-default-es-config" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.463424 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"elasticsearch-es-scripts" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.463423 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-default-es-transport-certs" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.463645 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-xpack-file-realm" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.474102 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.517553 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-bntkc"] Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.518359 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-bntkc" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.520376 4856 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-lwqlz" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.527988 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-bntkc"] Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.538344 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/97ecd658-bf64-4607-9f66-6b976ea97c3c-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.538407 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/97ecd658-bf64-4607-9f66-6b976ea97c3c-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.538434 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.538459 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.538483 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/97ecd658-bf64-4607-9f66-6b976ea97c3c-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.538506 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.538533 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.538566 4856 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.538612 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.538639 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.538673 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.538698 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/97ecd658-bf64-4607-9f66-6b976ea97c3c-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.538722 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.538757 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.538787 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 
00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.639630 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.639673 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/97ecd658-bf64-4607-9f66-6b976ea97c3c-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.639701 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.639730 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n82px\" (UniqueName: \"kubernetes.io/projected/65ef0d24-261e-488f-ad30-35ae9ef4a68a-kube-api-access-n82px\") pod \"cert-manager-86cb77c54b-bntkc\" (UID: \"65ef0d24-261e-488f-ad30-35ae9ef4a68a\") " pod="cert-manager/cert-manager-86cb77c54b-bntkc" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.639761 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.639785 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.639842 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/97ecd658-bf64-4607-9f66-6b976ea97c3c-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.639869 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/97ecd658-bf64-4607-9f66-6b976ea97c3c-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.639887 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-scripts\" 
(UniqueName: \"kubernetes.io/configmap/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.639908 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.639928 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/97ecd658-bf64-4607-9f66-6b976ea97c3c-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.639947 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.639969 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.639995 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.640014 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.640033 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.640056 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/65ef0d24-261e-488f-ad30-35ae9ef4a68a-bound-sa-token\") pod 
\"cert-manager-86cb77c54b-bntkc\" (UID: \"65ef0d24-261e-488f-ad30-35ae9ef4a68a\") " pod="cert-manager/cert-manager-86cb77c54b-bntkc" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.640810 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/97ecd658-bf64-4607-9f66-6b976ea97c3c-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.640929 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/97ecd658-bf64-4607-9f66-6b976ea97c3c-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.640971 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.641753 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.641776 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.641831 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/97ecd658-bf64-4607-9f66-6b976ea97c3c-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.642206 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.643180 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.645242 4856 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/97ecd658-bf64-4607-9f66-6b976ea97c3c-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.645242 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.645251 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.645325 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.645483 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.646714 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.646955 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/97ecd658-bf64-4607-9f66-6b976ea97c3c-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"97ecd658-bf64-4607-9f66-6b976ea97c3c\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.740700 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n82px\" (UniqueName: \"kubernetes.io/projected/65ef0d24-261e-488f-ad30-35ae9ef4a68a-kube-api-access-n82px\") pod \"cert-manager-86cb77c54b-bntkc\" (UID: \"65ef0d24-261e-488f-ad30-35ae9ef4a68a\") " pod="cert-manager/cert-manager-86cb77c54b-bntkc" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.741081 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/65ef0d24-261e-488f-ad30-35ae9ef4a68a-bound-sa-token\") pod \"cert-manager-86cb77c54b-bntkc\" (UID: 
\"65ef0d24-261e-488f-ad30-35ae9ef4a68a\") " pod="cert-manager/cert-manager-86cb77c54b-bntkc" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.759485 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/65ef0d24-261e-488f-ad30-35ae9ef4a68a-bound-sa-token\") pod \"cert-manager-86cb77c54b-bntkc\" (UID: \"65ef0d24-261e-488f-ad30-35ae9ef4a68a\") " pod="cert-manager/cert-manager-86cb77c54b-bntkc" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.760138 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n82px\" (UniqueName: \"kubernetes.io/projected/65ef0d24-261e-488f-ad30-35ae9ef4a68a-kube-api-access-n82px\") pod \"cert-manager-86cb77c54b-bntkc\" (UID: \"65ef0d24-261e-488f-ad30-35ae9ef4a68a\") " pod="cert-manager/cert-manager-86cb77c54b-bntkc" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.780391 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:19:41 crc kubenswrapper[4856]: I1202 00:19:41.836202 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-bntkc" Dec 02 00:19:42 crc kubenswrapper[4856]: I1202 00:19:42.392021 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Dec 02 00:19:42 crc kubenswrapper[4856]: I1202 00:19:42.428773 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-bntkc"] Dec 02 00:19:42 crc kubenswrapper[4856]: W1202 00:19:42.449749 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod65ef0d24_261e_488f_ad30_35ae9ef4a68a.slice/crio-09b18704c5badb3cb950a94451a0589e614310e014c42e755817371d2e2c0b11 WatchSource:0}: Error finding container 09b18704c5badb3cb950a94451a0589e614310e014c42e755817371d2e2c0b11: Status 404 returned error can't find the container with id 09b18704c5badb3cb950a94451a0589e614310e014c42e755817371d2e2c0b11 Dec 02 00:19:42 crc kubenswrapper[4856]: I1202 00:19:42.987000 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"97ecd658-bf64-4607-9f66-6b976ea97c3c","Type":"ContainerStarted","Data":"5f6d8c60436cdf4119e1db8eaf85a8349872252b4975f4632bcb625afa700cb9"} Dec 02 00:19:42 crc kubenswrapper[4856]: I1202 00:19:42.988809 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-bntkc" event={"ID":"65ef0d24-261e-488f-ad30-35ae9ef4a68a","Type":"ContainerStarted","Data":"09c032e0894302ee5cbeaa0293ca28de49fc2c3c8cce2017e8103bfc5a1777c9"} Dec 02 00:19:42 crc kubenswrapper[4856]: I1202 00:19:42.988860 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-bntkc" event={"ID":"65ef0d24-261e-488f-ad30-35ae9ef4a68a","Type":"ContainerStarted","Data":"09b18704c5badb3cb950a94451a0589e614310e014c42e755817371d2e2c0b11"} Dec 02 00:19:43 crc kubenswrapper[4856]: I1202 00:19:43.009443 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-bntkc" podStartSLOduration=2.009420492 podStartE2EDuration="2.009420492s" podCreationTimestamp="2025-12-02 00:19:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:19:43.005948188 
+0000 UTC m=+810.032316222" watchObservedRunningTime="2025-12-02 00:19:43.009420492 +0000 UTC m=+810.035788496" Dec 02 00:19:46 crc kubenswrapper[4856]: I1202 00:19:46.807465 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-mpnvc" Dec 02 00:19:49 crc kubenswrapper[4856]: I1202 00:19:49.208424 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-wz88m" Dec 02 00:19:55 crc kubenswrapper[4856]: I1202 00:19:55.085240 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"97ecd658-bf64-4607-9f66-6b976ea97c3c","Type":"ContainerStarted","Data":"c084984332eb7efa2c122622d5d2ed7ea341acab0607ad1586f43b76bd3bfad6"} Dec 02 00:19:55 crc kubenswrapper[4856]: I1202 00:19:55.300919 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Dec 02 00:19:55 crc kubenswrapper[4856]: I1202 00:19:55.337423 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Dec 02 00:19:57 crc kubenswrapper[4856]: I1202 00:19:57.095933 4856 generic.go:334] "Generic (PLEG): container finished" podID="97ecd658-bf64-4607-9f66-6b976ea97c3c" containerID="c084984332eb7efa2c122622d5d2ed7ea341acab0607ad1586f43b76bd3bfad6" exitCode=0 Dec 02 00:19:57 crc kubenswrapper[4856]: I1202 00:19:57.096017 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"97ecd658-bf64-4607-9f66-6b976ea97c3c","Type":"ContainerDied","Data":"c084984332eb7efa2c122622d5d2ed7ea341acab0607ad1586f43b76bd3bfad6"} Dec 02 00:20:00 crc kubenswrapper[4856]: I1202 00:20:00.116219 4856 generic.go:334] "Generic (PLEG): container finished" podID="97ecd658-bf64-4607-9f66-6b976ea97c3c" containerID="cd82b772ed56f03b229c66afde086adf507a49b8cb6fe2d17179386fadf1efda" exitCode=0 Dec 02 00:20:00 crc kubenswrapper[4856]: I1202 00:20:00.116277 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"97ecd658-bf64-4607-9f66-6b976ea97c3c","Type":"ContainerDied","Data":"cd82b772ed56f03b229c66afde086adf507a49b8cb6fe2d17179386fadf1efda"} Dec 02 00:20:01 crc kubenswrapper[4856]: I1202 00:20:01.124235 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"97ecd658-bf64-4607-9f66-6b976ea97c3c","Type":"ContainerStarted","Data":"e836346c1a95afeb00a08ff12be7e284aca559e6390be9a198738fa1ffdecafe"} Dec 02 00:20:01 crc kubenswrapper[4856]: I1202 00:20:01.124424 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:20:01 crc kubenswrapper[4856]: I1202 00:20:01.153100 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/elasticsearch-es-default-0" podStartSLOduration=8.009892779 podStartE2EDuration="20.153082772s" podCreationTimestamp="2025-12-02 00:19:41 +0000 UTC" firstStartedPulling="2025-12-02 00:19:42.407113481 +0000 UTC m=+809.433481495" lastFinishedPulling="2025-12-02 00:19:54.550303484 +0000 UTC m=+821.576671488" observedRunningTime="2025-12-02 00:20:01.148785497 +0000 UTC m=+828.175153521" watchObservedRunningTime="2025-12-02 00:20:01.153082772 +0000 UTC m=+828.179450776" Dec 02 00:20:11 crc kubenswrapper[4856]: I1202 00:20:11.864841 4856 prober.go:107] "Probe failed" 
probeType="Readiness" pod="service-telemetry/elasticsearch-es-default-0" podUID="97ecd658-bf64-4607-9f66-6b976ea97c3c" containerName="elasticsearch" probeResult="failure" output=< Dec 02 00:20:11 crc kubenswrapper[4856]: {"timestamp": "2025-12-02T00:20:11+00:00", "message": "readiness probe failed", "curl_rc": "7"} Dec 02 00:20:11 crc kubenswrapper[4856]: > Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.144346 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-framework-index-1-build"] Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.147793 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.150853 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-5tpp6" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.150938 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-framework-index-1-sys-config" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.151174 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-framework-index-1-global-ca" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.151659 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-framework-index-1-ca" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.155865 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"service-telemetry-framework-index-dockercfg" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.180230 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-index-1-build"] Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.227849 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-container-storage-root\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.227921 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-build-blob-cache\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.228020 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-5tpp6-push\" (UniqueName: \"kubernetes.io/secret/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-builder-dockercfg-5tpp6-push\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.228085 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: 
\"kubernetes.io/secret/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-service-telemetry-framework-index-dockercfg-user-build-volume\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.228220 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-buildworkdir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.228276 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-5tpp6-pull\" (UniqueName: \"kubernetes.io/secret/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-builder-dockercfg-5tpp6-pull\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.228346 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-node-pullsecrets\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.228451 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-container-storage-run\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.228507 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-buildcachedir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.228564 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkh8l\" (UniqueName: \"kubernetes.io/projected/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-kube-api-access-xkh8l\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.228649 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-build-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.228691 4856 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-build-proxy-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.228735 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-build-system-configs\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.329991 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-5tpp6-pull\" (UniqueName: \"kubernetes.io/secret/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-builder-dockercfg-5tpp6-pull\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.330092 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-node-pullsecrets\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.330123 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-container-storage-run\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.330148 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-buildcachedir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.330180 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkh8l\" (UniqueName: \"kubernetes.io/projected/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-kube-api-access-xkh8l\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.330212 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-build-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.330239 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-build-proxy-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.330277 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-build-system-configs\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.330353 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-container-storage-root\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.330384 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-build-blob-cache\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.330418 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-5tpp6-push\" (UniqueName: \"kubernetes.io/secret/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-builder-dockercfg-5tpp6-push\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.330455 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-service-telemetry-framework-index-dockercfg-user-build-volume\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.330522 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-buildworkdir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.331055 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-buildworkdir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.331673 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" 
(UniqueName: \"kubernetes.io/host-path/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-node-pullsecrets\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.331925 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-container-storage-run\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.331991 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-buildcachedir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.333519 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-container-storage-root\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.334306 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-build-system-configs\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.334327 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-build-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.334362 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-build-proxy-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.334752 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-build-blob-cache\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.340456 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-5tpp6-pull\" (UniqueName: \"kubernetes.io/secret/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-builder-dockercfg-5tpp6-pull\") pod \"service-telemetry-framework-index-1-build\" 
(UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.340889 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-service-telemetry-framework-index-dockercfg-user-build-volume\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.341172 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-5tpp6-push\" (UniqueName: \"kubernetes.io/secret/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-builder-dockercfg-5tpp6-push\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.371773 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkh8l\" (UniqueName: \"kubernetes.io/projected/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-kube-api-access-xkh8l\") pod \"service-telemetry-framework-index-1-build\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.468643 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:20:15 crc kubenswrapper[4856]: I1202 00:20:15.737654 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-index-1-build"] Dec 02 00:20:16 crc kubenswrapper[4856]: I1202 00:20:16.238643 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65","Type":"ContainerStarted","Data":"d942481e861de0ef4f2d0e01adf06accf3997dc385dd6b06c6bc7a3925f965af"} Dec 02 00:20:17 crc kubenswrapper[4856]: I1202 00:20:17.112891 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/elasticsearch-es-default-0" Dec 02 00:20:22 crc kubenswrapper[4856]: I1202 00:20:22.287300 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65","Type":"ContainerStarted","Data":"4acf9c5b179913f3b4df84647ede55a82cb5f394fd0200c0445415aa14036fcf"} Dec 02 00:20:23 crc kubenswrapper[4856]: I1202 00:20:23.292993 4856 generic.go:334] "Generic (PLEG): container finished" podID="226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65" containerID="4acf9c5b179913f3b4df84647ede55a82cb5f394fd0200c0445415aa14036fcf" exitCode=0 Dec 02 00:20:23 crc kubenswrapper[4856]: I1202 00:20:23.293083 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65","Type":"ContainerDied","Data":"4acf9c5b179913f3b4df84647ede55a82cb5f394fd0200c0445415aa14036fcf"} Dec 02 00:20:24 crc kubenswrapper[4856]: I1202 00:20:24.302666 4856 generic.go:334] "Generic (PLEG): container finished" podID="226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65" 
containerID="c3707d8fb2189be7c3691fa430e56772a0860135d9080b3edc700b07e125dd75" exitCode=0 Dec 02 00:20:24 crc kubenswrapper[4856]: I1202 00:20:24.302759 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65","Type":"ContainerDied","Data":"c3707d8fb2189be7c3691fa430e56772a0860135d9080b3edc700b07e125dd75"} Dec 02 00:20:24 crc kubenswrapper[4856]: I1202 00:20:24.356216 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-framework-index-1-build_226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65/manage-dockerfile/0.log" Dec 02 00:20:27 crc kubenswrapper[4856]: I1202 00:20:27.326299 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65","Type":"ContainerStarted","Data":"11644350a8c59b6f3629ba7960d8c5612e3d7572bc4ba1c118bfe65802872a5b"} Dec 02 00:20:27 crc kubenswrapper[4856]: I1202 00:20:27.371185 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-framework-index-1-build" podStartSLOduration=6.991588165 podStartE2EDuration="12.37113909s" podCreationTimestamp="2025-12-02 00:20:15 +0000 UTC" firstStartedPulling="2025-12-02 00:20:15.743270243 +0000 UTC m=+842.769638237" lastFinishedPulling="2025-12-02 00:20:21.122821168 +0000 UTC m=+848.149189162" observedRunningTime="2025-12-02 00:20:27.36661878 +0000 UTC m=+854.392986814" watchObservedRunningTime="2025-12-02 00:20:27.37113909 +0000 UTC m=+854.397507124" Dec 02 00:20:43 crc kubenswrapper[4856]: I1202 00:20:43.686522 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5c9tq"] Dec 02 00:20:43 crc kubenswrapper[4856]: I1202 00:20:43.688131 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5c9tq" Dec 02 00:20:43 crc kubenswrapper[4856]: I1202 00:20:43.699267 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5c9tq"] Dec 02 00:20:43 crc kubenswrapper[4856]: I1202 00:20:43.748437 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgb95\" (UniqueName: \"kubernetes.io/projected/3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9-kube-api-access-hgb95\") pod \"community-operators-5c9tq\" (UID: \"3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9\") " pod="openshift-marketplace/community-operators-5c9tq" Dec 02 00:20:43 crc kubenswrapper[4856]: I1202 00:20:43.748512 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9-utilities\") pod \"community-operators-5c9tq\" (UID: \"3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9\") " pod="openshift-marketplace/community-operators-5c9tq" Dec 02 00:20:43 crc kubenswrapper[4856]: I1202 00:20:43.748544 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9-catalog-content\") pod \"community-operators-5c9tq\" (UID: \"3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9\") " pod="openshift-marketplace/community-operators-5c9tq" Dec 02 00:20:43 crc kubenswrapper[4856]: I1202 00:20:43.863377 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgb95\" (UniqueName: \"kubernetes.io/projected/3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9-kube-api-access-hgb95\") pod \"community-operators-5c9tq\" (UID: \"3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9\") " pod="openshift-marketplace/community-operators-5c9tq" Dec 02 00:20:43 crc kubenswrapper[4856]: I1202 00:20:43.863486 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9-utilities\") pod \"community-operators-5c9tq\" (UID: \"3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9\") " pod="openshift-marketplace/community-operators-5c9tq" Dec 02 00:20:43 crc kubenswrapper[4856]: I1202 00:20:43.863528 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9-catalog-content\") pod \"community-operators-5c9tq\" (UID: \"3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9\") " pod="openshift-marketplace/community-operators-5c9tq" Dec 02 00:20:43 crc kubenswrapper[4856]: I1202 00:20:43.864033 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9-catalog-content\") pod \"community-operators-5c9tq\" (UID: \"3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9\") " pod="openshift-marketplace/community-operators-5c9tq" Dec 02 00:20:43 crc kubenswrapper[4856]: I1202 00:20:43.864723 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9-utilities\") pod \"community-operators-5c9tq\" (UID: \"3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9\") " pod="openshift-marketplace/community-operators-5c9tq" Dec 02 00:20:43 crc kubenswrapper[4856]: I1202 00:20:43.887035 4856 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-hgb95\" (UniqueName: \"kubernetes.io/projected/3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9-kube-api-access-hgb95\") pod \"community-operators-5c9tq\" (UID: \"3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9\") " pod="openshift-marketplace/community-operators-5c9tq" Dec 02 00:20:44 crc kubenswrapper[4856]: I1202 00:20:44.134757 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5c9tq" Dec 02 00:20:44 crc kubenswrapper[4856]: I1202 00:20:44.461368 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5c9tq"] Dec 02 00:20:44 crc kubenswrapper[4856]: I1202 00:20:44.825719 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5c9tq" event={"ID":"3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9","Type":"ContainerStarted","Data":"fe028ba700cf09ed88c144705e38e616b129020efaed8ed8405eda977de67d65"} Dec 02 00:20:45 crc kubenswrapper[4856]: I1202 00:20:45.832363 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5c9tq" event={"ID":"3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9","Type":"ContainerStarted","Data":"466f85474de11b5a51946b170e7916dfbc7be30b92e58c12c95c917ab67ccfce"} Dec 02 00:20:46 crc kubenswrapper[4856]: I1202 00:20:46.841276 4856 generic.go:334] "Generic (PLEG): container finished" podID="3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9" containerID="466f85474de11b5a51946b170e7916dfbc7be30b92e58c12c95c917ab67ccfce" exitCode=0 Dec 02 00:20:46 crc kubenswrapper[4856]: I1202 00:20:46.841351 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5c9tq" event={"ID":"3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9","Type":"ContainerDied","Data":"466f85474de11b5a51946b170e7916dfbc7be30b92e58c12c95c917ab67ccfce"} Dec 02 00:20:48 crc kubenswrapper[4856]: I1202 00:20:48.860811 4856 generic.go:334] "Generic (PLEG): container finished" podID="3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9" containerID="22285b89c11fcf97ab9ada9c275ef18b316a58b5df653381cab64dd4407730bc" exitCode=0 Dec 02 00:20:48 crc kubenswrapper[4856]: I1202 00:20:48.861103 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5c9tq" event={"ID":"3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9","Type":"ContainerDied","Data":"22285b89c11fcf97ab9ada9c275ef18b316a58b5df653381cab64dd4407730bc"} Dec 02 00:20:49 crc kubenswrapper[4856]: I1202 00:20:49.867581 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5c9tq" event={"ID":"3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9","Type":"ContainerStarted","Data":"a33347150ec572b1f1968d144b86da126057356dd84048d8a4b8cdca0d487fa4"} Dec 02 00:20:49 crc kubenswrapper[4856]: I1202 00:20:49.884267 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5c9tq" podStartSLOduration=4.241656908 podStartE2EDuration="6.884244714s" podCreationTimestamp="2025-12-02 00:20:43 +0000 UTC" firstStartedPulling="2025-12-02 00:20:46.843565954 +0000 UTC m=+873.869933968" lastFinishedPulling="2025-12-02 00:20:49.48615378 +0000 UTC m=+876.512521774" observedRunningTime="2025-12-02 00:20:49.882059831 +0000 UTC m=+876.908427835" watchObservedRunningTime="2025-12-02 00:20:49.884244714 +0000 UTC m=+876.910612738" Dec 02 00:20:54 crc kubenswrapper[4856]: I1202 00:20:54.135646 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/community-operators-5c9tq" Dec 02 00:20:54 crc kubenswrapper[4856]: I1202 00:20:54.137336 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5c9tq" Dec 02 00:20:54 crc kubenswrapper[4856]: I1202 00:20:54.191319 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5c9tq" Dec 02 00:20:54 crc kubenswrapper[4856]: I1202 00:20:54.947317 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5c9tq" Dec 02 00:20:55 crc kubenswrapper[4856]: I1202 00:20:55.009719 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5c9tq"] Dec 02 00:20:56 crc kubenswrapper[4856]: I1202 00:20:56.912964 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5c9tq" podUID="3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9" containerName="registry-server" containerID="cri-o://a33347150ec572b1f1968d144b86da126057356dd84048d8a4b8cdca0d487fa4" gracePeriod=2 Dec 02 00:20:57 crc kubenswrapper[4856]: I1202 00:20:57.309922 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5c9tq" Dec 02 00:20:57 crc kubenswrapper[4856]: I1202 00:20:57.349476 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9-utilities\") pod \"3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9\" (UID: \"3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9\") " Dec 02 00:20:57 crc kubenswrapper[4856]: I1202 00:20:57.349569 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hgb95\" (UniqueName: \"kubernetes.io/projected/3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9-kube-api-access-hgb95\") pod \"3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9\" (UID: \"3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9\") " Dec 02 00:20:57 crc kubenswrapper[4856]: I1202 00:20:57.349623 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9-catalog-content\") pod \"3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9\" (UID: \"3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9\") " Dec 02 00:20:57 crc kubenswrapper[4856]: I1202 00:20:57.351921 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9-utilities" (OuterVolumeSpecName: "utilities") pod "3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9" (UID: "3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:20:57 crc kubenswrapper[4856]: I1202 00:20:57.357573 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9-kube-api-access-hgb95" (OuterVolumeSpecName: "kube-api-access-hgb95") pod "3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9" (UID: "3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9"). InnerVolumeSpecName "kube-api-access-hgb95". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:20:57 crc kubenswrapper[4856]: I1202 00:20:57.412985 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9" (UID: "3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:20:57 crc kubenswrapper[4856]: I1202 00:20:57.451006 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hgb95\" (UniqueName: \"kubernetes.io/projected/3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9-kube-api-access-hgb95\") on node \"crc\" DevicePath \"\"" Dec 02 00:20:57 crc kubenswrapper[4856]: I1202 00:20:57.451043 4856 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 00:20:57 crc kubenswrapper[4856]: I1202 00:20:57.451052 4856 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 00:20:57 crc kubenswrapper[4856]: I1202 00:20:57.924819 4856 generic.go:334] "Generic (PLEG): container finished" podID="3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9" containerID="a33347150ec572b1f1968d144b86da126057356dd84048d8a4b8cdca0d487fa4" exitCode=0 Dec 02 00:20:57 crc kubenswrapper[4856]: I1202 00:20:57.924926 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5c9tq" Dec 02 00:20:57 crc kubenswrapper[4856]: I1202 00:20:57.924899 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5c9tq" event={"ID":"3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9","Type":"ContainerDied","Data":"a33347150ec572b1f1968d144b86da126057356dd84048d8a4b8cdca0d487fa4"} Dec 02 00:20:57 crc kubenswrapper[4856]: I1202 00:20:57.925924 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5c9tq" event={"ID":"3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9","Type":"ContainerDied","Data":"fe028ba700cf09ed88c144705e38e616b129020efaed8ed8405eda977de67d65"} Dec 02 00:20:57 crc kubenswrapper[4856]: I1202 00:20:57.925993 4856 scope.go:117] "RemoveContainer" containerID="a33347150ec572b1f1968d144b86da126057356dd84048d8a4b8cdca0d487fa4" Dec 02 00:20:57 crc kubenswrapper[4856]: I1202 00:20:57.952502 4856 scope.go:117] "RemoveContainer" containerID="22285b89c11fcf97ab9ada9c275ef18b316a58b5df653381cab64dd4407730bc" Dec 02 00:20:57 crc kubenswrapper[4856]: I1202 00:20:57.983582 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5c9tq"] Dec 02 00:20:57 crc kubenswrapper[4856]: I1202 00:20:57.992115 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5c9tq"] Dec 02 00:20:57 crc kubenswrapper[4856]: I1202 00:20:57.994065 4856 scope.go:117] "RemoveContainer" containerID="466f85474de11b5a51946b170e7916dfbc7be30b92e58c12c95c917ab67ccfce" Dec 02 00:20:58 crc kubenswrapper[4856]: I1202 00:20:58.013647 4856 scope.go:117] "RemoveContainer" containerID="a33347150ec572b1f1968d144b86da126057356dd84048d8a4b8cdca0d487fa4" Dec 02 00:20:58 crc kubenswrapper[4856]: E1202 00:20:58.014880 4856 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a33347150ec572b1f1968d144b86da126057356dd84048d8a4b8cdca0d487fa4\": container with ID starting with a33347150ec572b1f1968d144b86da126057356dd84048d8a4b8cdca0d487fa4 not found: ID does not exist" containerID="a33347150ec572b1f1968d144b86da126057356dd84048d8a4b8cdca0d487fa4" Dec 02 00:20:58 crc kubenswrapper[4856]: I1202 00:20:58.014967 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a33347150ec572b1f1968d144b86da126057356dd84048d8a4b8cdca0d487fa4"} err="failed to get container status \"a33347150ec572b1f1968d144b86da126057356dd84048d8a4b8cdca0d487fa4\": rpc error: code = NotFound desc = could not find container \"a33347150ec572b1f1968d144b86da126057356dd84048d8a4b8cdca0d487fa4\": container with ID starting with a33347150ec572b1f1968d144b86da126057356dd84048d8a4b8cdca0d487fa4 not found: ID does not exist" Dec 02 00:20:58 crc kubenswrapper[4856]: I1202 00:20:58.015019 4856 scope.go:117] "RemoveContainer" containerID="22285b89c11fcf97ab9ada9c275ef18b316a58b5df653381cab64dd4407730bc" Dec 02 00:20:58 crc kubenswrapper[4856]: E1202 00:20:58.015906 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22285b89c11fcf97ab9ada9c275ef18b316a58b5df653381cab64dd4407730bc\": container with ID starting with 22285b89c11fcf97ab9ada9c275ef18b316a58b5df653381cab64dd4407730bc not found: ID does not exist" containerID="22285b89c11fcf97ab9ada9c275ef18b316a58b5df653381cab64dd4407730bc" Dec 02 00:20:58 crc kubenswrapper[4856]: I1202 00:20:58.015941 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22285b89c11fcf97ab9ada9c275ef18b316a58b5df653381cab64dd4407730bc"} err="failed to get container status \"22285b89c11fcf97ab9ada9c275ef18b316a58b5df653381cab64dd4407730bc\": rpc error: code = NotFound desc = could not find container \"22285b89c11fcf97ab9ada9c275ef18b316a58b5df653381cab64dd4407730bc\": container with ID starting with 22285b89c11fcf97ab9ada9c275ef18b316a58b5df653381cab64dd4407730bc not found: ID does not exist" Dec 02 00:20:58 crc kubenswrapper[4856]: I1202 00:20:58.015965 4856 scope.go:117] "RemoveContainer" containerID="466f85474de11b5a51946b170e7916dfbc7be30b92e58c12c95c917ab67ccfce" Dec 02 00:20:58 crc kubenswrapper[4856]: E1202 00:20:58.016312 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"466f85474de11b5a51946b170e7916dfbc7be30b92e58c12c95c917ab67ccfce\": container with ID starting with 466f85474de11b5a51946b170e7916dfbc7be30b92e58c12c95c917ab67ccfce not found: ID does not exist" containerID="466f85474de11b5a51946b170e7916dfbc7be30b92e58c12c95c917ab67ccfce" Dec 02 00:20:58 crc kubenswrapper[4856]: I1202 00:20:58.016338 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"466f85474de11b5a51946b170e7916dfbc7be30b92e58c12c95c917ab67ccfce"} err="failed to get container status \"466f85474de11b5a51946b170e7916dfbc7be30b92e58c12c95c917ab67ccfce\": rpc error: code = NotFound desc = could not find container \"466f85474de11b5a51946b170e7916dfbc7be30b92e58c12c95c917ab67ccfce\": container with ID starting with 466f85474de11b5a51946b170e7916dfbc7be30b92e58c12c95c917ab67ccfce not found: ID does not exist" Dec 02 00:20:59 crc kubenswrapper[4856]: I1202 00:20:59.259558 4856 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9" path="/var/lib/kubelet/pods/3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9/volumes" Dec 02 00:21:05 crc kubenswrapper[4856]: I1202 00:21:05.977146 4856 generic.go:334] "Generic (PLEG): container finished" podID="226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65" containerID="11644350a8c59b6f3629ba7960d8c5612e3d7572bc4ba1c118bfe65802872a5b" exitCode=0 Dec 02 00:21:05 crc kubenswrapper[4856]: I1202 00:21:05.977230 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65","Type":"ContainerDied","Data":"11644350a8c59b6f3629ba7960d8c5612e3d7572bc4ba1c118bfe65802872a5b"} Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.241555 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.295881 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-buildworkdir\") pod \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.295949 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xkh8l\" (UniqueName: \"kubernetes.io/projected/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-kube-api-access-xkh8l\") pod \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.295990 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-build-system-configs\") pod \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.296016 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-build-proxy-ca-bundles\") pod \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.296044 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-5tpp6-push\" (UniqueName: \"kubernetes.io/secret/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-builder-dockercfg-5tpp6-push\") pod \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.296070 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-service-telemetry-framework-index-dockercfg-user-build-volume\") pod \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.296085 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-node-pullsecrets\") pod \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\" (UID: 
\"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.296107 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-build-blob-cache\") pod \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.296139 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-5tpp6-pull\" (UniqueName: \"kubernetes.io/secret/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-builder-dockercfg-5tpp6-pull\") pod \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.296155 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-container-storage-run\") pod \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.296178 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-build-ca-bundles\") pod \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.296201 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-buildcachedir\") pod \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.296212 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65" (UID: "226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.296251 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-container-storage-root\") pod \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\" (UID: \"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65\") " Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.296545 4856 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.296818 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65" (UID: "226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65"). InnerVolumeSpecName "build-system-configs". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.296934 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65" (UID: "226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.297105 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65" (UID: "226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.300049 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65" (UID: "226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.300239 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65" (UID: "226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.300858 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65" (UID: "226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.302115 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-builder-dockercfg-5tpp6-push" (OuterVolumeSpecName: "builder-dockercfg-5tpp6-push") pod "226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65" (UID: "226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65"). InnerVolumeSpecName "builder-dockercfg-5tpp6-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.302476 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-service-telemetry-framework-index-dockercfg-user-build-volume" (OuterVolumeSpecName: "service-telemetry-framework-index-dockercfg-user-build-volume") pod "226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65" (UID: "226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65"). InnerVolumeSpecName "service-telemetry-framework-index-dockercfg-user-build-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.303430 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-builder-dockercfg-5tpp6-pull" (OuterVolumeSpecName: "builder-dockercfg-5tpp6-pull") pod "226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65" (UID: "226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65"). InnerVolumeSpecName "builder-dockercfg-5tpp6-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.303787 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-kube-api-access-xkh8l" (OuterVolumeSpecName: "kube-api-access-xkh8l") pod "226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65" (UID: "226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65"). InnerVolumeSpecName "kube-api-access-xkh8l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.397716 4856 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-buildworkdir\") on node \"crc\" DevicePath \"\"" Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.397770 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xkh8l\" (UniqueName: \"kubernetes.io/projected/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-kube-api-access-xkh8l\") on node \"crc\" DevicePath \"\"" Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.397786 4856 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-build-system-configs\") on node \"crc\" DevicePath \"\"" Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.397800 4856 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.397812 4856 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-5tpp6-push\" (UniqueName: \"kubernetes.io/secret/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-builder-dockercfg-5tpp6-push\") on node \"crc\" DevicePath \"\"" Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.397824 4856 reconciler_common.go:293] "Volume detached for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-service-telemetry-framework-index-dockercfg-user-build-volume\") on node \"crc\" DevicePath \"\"" Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.397837 4856 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-5tpp6-pull\" (UniqueName: \"kubernetes.io/secret/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-builder-dockercfg-5tpp6-pull\") on node \"crc\" DevicePath \"\"" Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.397849 4856 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-container-storage-run\") on node \"crc\" DevicePath \"\"" Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.397857 4856 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.397869 4856 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-buildcachedir\") on node \"crc\" DevicePath \"\"" Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.485516 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65" (UID: "226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.499647 4856 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-build-blob-cache\") on node \"crc\" DevicePath \"\"" Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.991565 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.991610 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65","Type":"ContainerDied","Data":"d942481e861de0ef4f2d0e01adf06accf3997dc385dd6b06c6bc7a3925f965af"} Dec 02 00:21:07 crc kubenswrapper[4856]: I1202 00:21:07.992256 4856 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d942481e861de0ef4f2d0e01adf06accf3997dc385dd6b06c6bc7a3925f965af" Dec 02 00:21:08 crc kubenswrapper[4856]: I1202 00:21:08.244120 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65" (UID: "226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65"). InnerVolumeSpecName "container-storage-root". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:21:08 crc kubenswrapper[4856]: I1202 00:21:08.314931 4856 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65-container-storage-root\") on node \"crc\" DevicePath \"\"" Dec 02 00:21:08 crc kubenswrapper[4856]: I1202 00:21:08.780302 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/infrawatch-operators-42jvs"] Dec 02 00:21:08 crc kubenswrapper[4856]: E1202 00:21:08.780509 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65" containerName="manage-dockerfile" Dec 02 00:21:08 crc kubenswrapper[4856]: I1202 00:21:08.780521 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65" containerName="manage-dockerfile" Dec 02 00:21:08 crc kubenswrapper[4856]: E1202 00:21:08.780529 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9" containerName="extract-content" Dec 02 00:21:08 crc kubenswrapper[4856]: I1202 00:21:08.780534 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9" containerName="extract-content" Dec 02 00:21:08 crc kubenswrapper[4856]: E1202 00:21:08.780546 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65" containerName="git-clone" Dec 02 00:21:08 crc kubenswrapper[4856]: I1202 00:21:08.780553 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65" containerName="git-clone" Dec 02 00:21:08 crc kubenswrapper[4856]: E1202 00:21:08.780565 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9" containerName="extract-utilities" Dec 02 00:21:08 crc kubenswrapper[4856]: I1202 00:21:08.780571 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9" containerName="extract-utilities" Dec 02 00:21:08 crc kubenswrapper[4856]: E1202 00:21:08.780584 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9" containerName="registry-server" Dec 02 00:21:08 crc kubenswrapper[4856]: I1202 00:21:08.780660 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9" containerName="registry-server" Dec 02 00:21:08 crc kubenswrapper[4856]: E1202 00:21:08.780668 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65" containerName="docker-build" Dec 02 00:21:08 crc kubenswrapper[4856]: I1202 00:21:08.780675 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65" containerName="docker-build" Dec 02 00:21:08 crc kubenswrapper[4856]: I1202 00:21:08.780766 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c7b37d8-4fd1-4cb9-b5f6-2a4378b607f9" containerName="registry-server" Dec 02 00:21:08 crc kubenswrapper[4856]: I1202 00:21:08.780780 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="226c13e6-6c5e-4cb1-a65f-8dfa4a0e3e65" containerName="docker-build" Dec 02 00:21:08 crc kubenswrapper[4856]: I1202 00:21:08.781142 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/infrawatch-operators-42jvs" Dec 02 00:21:08 crc kubenswrapper[4856]: I1202 00:21:08.783007 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"infrawatch-operators-dockercfg-jtk5k" Dec 02 00:21:08 crc kubenswrapper[4856]: I1202 00:21:08.796362 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-42jvs"] Dec 02 00:21:08 crc kubenswrapper[4856]: I1202 00:21:08.821390 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2kqdq\" (UniqueName: \"kubernetes.io/projected/7eebda05-158b-4ed9-a391-fd2622cc6b75-kube-api-access-2kqdq\") pod \"infrawatch-operators-42jvs\" (UID: \"7eebda05-158b-4ed9-a391-fd2622cc6b75\") " pod="service-telemetry/infrawatch-operators-42jvs" Dec 02 00:21:08 crc kubenswrapper[4856]: I1202 00:21:08.921969 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2kqdq\" (UniqueName: \"kubernetes.io/projected/7eebda05-158b-4ed9-a391-fd2622cc6b75-kube-api-access-2kqdq\") pod \"infrawatch-operators-42jvs\" (UID: \"7eebda05-158b-4ed9-a391-fd2622cc6b75\") " pod="service-telemetry/infrawatch-operators-42jvs" Dec 02 00:21:08 crc kubenswrapper[4856]: I1202 00:21:08.938121 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kqdq\" (UniqueName: \"kubernetes.io/projected/7eebda05-158b-4ed9-a391-fd2622cc6b75-kube-api-access-2kqdq\") pod \"infrawatch-operators-42jvs\" (UID: \"7eebda05-158b-4ed9-a391-fd2622cc6b75\") " pod="service-telemetry/infrawatch-operators-42jvs" Dec 02 00:21:09 crc kubenswrapper[4856]: I1202 00:21:09.099786 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-42jvs" Dec 02 00:21:09 crc kubenswrapper[4856]: I1202 00:21:09.292487 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-42jvs"] Dec 02 00:21:10 crc kubenswrapper[4856]: I1202 00:21:10.006735 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-42jvs" event={"ID":"7eebda05-158b-4ed9-a391-fd2622cc6b75","Type":"ContainerStarted","Data":"febe9edda7fc5e20612f97f04c96537468dbedbba9ead662cb654220c7889753"} Dec 02 00:21:13 crc kubenswrapper[4856]: I1202 00:21:13.167553 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-42jvs"] Dec 02 00:21:13 crc kubenswrapper[4856]: I1202 00:21:13.977799 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/infrawatch-operators-pb4hp"] Dec 02 00:21:13 crc kubenswrapper[4856]: I1202 00:21:13.978577 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/infrawatch-operators-pb4hp" Dec 02 00:21:13 crc kubenswrapper[4856]: I1202 00:21:13.988093 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-pb4hp"] Dec 02 00:21:14 crc kubenswrapper[4856]: I1202 00:21:14.130172 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4nrv\" (UniqueName: \"kubernetes.io/projected/cc93804c-1eb1-42f2-a96a-e32ee7fcbbc6-kube-api-access-h4nrv\") pod \"infrawatch-operators-pb4hp\" (UID: \"cc93804c-1eb1-42f2-a96a-e32ee7fcbbc6\") " pod="service-telemetry/infrawatch-operators-pb4hp" Dec 02 00:21:14 crc kubenswrapper[4856]: I1202 00:21:14.231214 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4nrv\" (UniqueName: \"kubernetes.io/projected/cc93804c-1eb1-42f2-a96a-e32ee7fcbbc6-kube-api-access-h4nrv\") pod \"infrawatch-operators-pb4hp\" (UID: \"cc93804c-1eb1-42f2-a96a-e32ee7fcbbc6\") " pod="service-telemetry/infrawatch-operators-pb4hp" Dec 02 00:21:14 crc kubenswrapper[4856]: I1202 00:21:14.248976 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4nrv\" (UniqueName: \"kubernetes.io/projected/cc93804c-1eb1-42f2-a96a-e32ee7fcbbc6-kube-api-access-h4nrv\") pod \"infrawatch-operators-pb4hp\" (UID: \"cc93804c-1eb1-42f2-a96a-e32ee7fcbbc6\") " pod="service-telemetry/infrawatch-operators-pb4hp" Dec 02 00:21:14 crc kubenswrapper[4856]: I1202 00:21:14.292948 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-pb4hp" Dec 02 00:21:20 crc kubenswrapper[4856]: I1202 00:21:20.269760 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-pb4hp"] Dec 02 00:21:23 crc kubenswrapper[4856]: I1202 00:21:23.105963 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-42jvs" event={"ID":"7eebda05-158b-4ed9-a391-fd2622cc6b75","Type":"ContainerStarted","Data":"b666dec95284eb62761efe0b8bac7d92ef66a5a1544a605cc3a5c8a0c82308a6"} Dec 02 00:21:23 crc kubenswrapper[4856]: I1202 00:21:23.106150 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/infrawatch-operators-42jvs" podUID="7eebda05-158b-4ed9-a391-fd2622cc6b75" containerName="registry-server" containerID="cri-o://b666dec95284eb62761efe0b8bac7d92ef66a5a1544a605cc3a5c8a0c82308a6" gracePeriod=2 Dec 02 00:21:23 crc kubenswrapper[4856]: I1202 00:21:23.107689 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-pb4hp" event={"ID":"cc93804c-1eb1-42f2-a96a-e32ee7fcbbc6","Type":"ContainerStarted","Data":"1718e5ab95bd62e09888d16288ba5af08696bef1b7a5709d2a8257272ad94a3e"} Dec 02 00:21:23 crc kubenswrapper[4856]: I1202 00:21:23.107717 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-pb4hp" event={"ID":"cc93804c-1eb1-42f2-a96a-e32ee7fcbbc6","Type":"ContainerStarted","Data":"635ded27542fad60412fa39d6cb955e0858f3daa09a1a2eb05018f1067ab2c21"} Dec 02 00:21:23 crc kubenswrapper[4856]: I1202 00:21:23.126753 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/infrawatch-operators-42jvs" podStartSLOduration=1.597722202 podStartE2EDuration="15.126733794s" podCreationTimestamp="2025-12-02 00:21:08 +0000 UTC" firstStartedPulling="2025-12-02 00:21:09.300947528 +0000 UTC 
m=+896.327315532" lastFinishedPulling="2025-12-02 00:21:22.82995912 +0000 UTC m=+909.856327124" observedRunningTime="2025-12-02 00:21:23.126021447 +0000 UTC m=+910.152389451" watchObservedRunningTime="2025-12-02 00:21:23.126733794 +0000 UTC m=+910.153101798" Dec 02 00:21:23 crc kubenswrapper[4856]: I1202 00:21:23.147237 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/infrawatch-operators-pb4hp" podStartSLOduration=9.933174646 podStartE2EDuration="10.147207261s" podCreationTimestamp="2025-12-02 00:21:13 +0000 UTC" firstStartedPulling="2025-12-02 00:21:22.558692046 +0000 UTC m=+909.585060050" lastFinishedPulling="2025-12-02 00:21:22.772724651 +0000 UTC m=+909.799092665" observedRunningTime="2025-12-02 00:21:23.14262675 +0000 UTC m=+910.168994774" watchObservedRunningTime="2025-12-02 00:21:23.147207261 +0000 UTC m=+910.173575265" Dec 02 00:21:23 crc kubenswrapper[4856]: I1202 00:21:23.436450 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-42jvs" Dec 02 00:21:23 crc kubenswrapper[4856]: I1202 00:21:23.462194 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2kqdq\" (UniqueName: \"kubernetes.io/projected/7eebda05-158b-4ed9-a391-fd2622cc6b75-kube-api-access-2kqdq\") pod \"7eebda05-158b-4ed9-a391-fd2622cc6b75\" (UID: \"7eebda05-158b-4ed9-a391-fd2622cc6b75\") " Dec 02 00:21:23 crc kubenswrapper[4856]: I1202 00:21:23.467908 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7eebda05-158b-4ed9-a391-fd2622cc6b75-kube-api-access-2kqdq" (OuterVolumeSpecName: "kube-api-access-2kqdq") pod "7eebda05-158b-4ed9-a391-fd2622cc6b75" (UID: "7eebda05-158b-4ed9-a391-fd2622cc6b75"). InnerVolumeSpecName "kube-api-access-2kqdq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:21:23 crc kubenswrapper[4856]: I1202 00:21:23.563769 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2kqdq\" (UniqueName: \"kubernetes.io/projected/7eebda05-158b-4ed9-a391-fd2622cc6b75-kube-api-access-2kqdq\") on node \"crc\" DevicePath \"\"" Dec 02 00:21:24 crc kubenswrapper[4856]: I1202 00:21:24.114444 4856 generic.go:334] "Generic (PLEG): container finished" podID="7eebda05-158b-4ed9-a391-fd2622cc6b75" containerID="b666dec95284eb62761efe0b8bac7d92ef66a5a1544a605cc3a5c8a0c82308a6" exitCode=0 Dec 02 00:21:24 crc kubenswrapper[4856]: I1202 00:21:24.114517 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/infrawatch-operators-42jvs" Dec 02 00:21:24 crc kubenswrapper[4856]: I1202 00:21:24.114540 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-42jvs" event={"ID":"7eebda05-158b-4ed9-a391-fd2622cc6b75","Type":"ContainerDied","Data":"b666dec95284eb62761efe0b8bac7d92ef66a5a1544a605cc3a5c8a0c82308a6"} Dec 02 00:21:24 crc kubenswrapper[4856]: I1202 00:21:24.114631 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-42jvs" event={"ID":"7eebda05-158b-4ed9-a391-fd2622cc6b75","Type":"ContainerDied","Data":"febe9edda7fc5e20612f97f04c96537468dbedbba9ead662cb654220c7889753"} Dec 02 00:21:24 crc kubenswrapper[4856]: I1202 00:21:24.114673 4856 scope.go:117] "RemoveContainer" containerID="b666dec95284eb62761efe0b8bac7d92ef66a5a1544a605cc3a5c8a0c82308a6" Dec 02 00:21:24 crc kubenswrapper[4856]: I1202 00:21:24.131383 4856 scope.go:117] "RemoveContainer" containerID="b666dec95284eb62761efe0b8bac7d92ef66a5a1544a605cc3a5c8a0c82308a6" Dec 02 00:21:24 crc kubenswrapper[4856]: E1202 00:21:24.132024 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b666dec95284eb62761efe0b8bac7d92ef66a5a1544a605cc3a5c8a0c82308a6\": container with ID starting with b666dec95284eb62761efe0b8bac7d92ef66a5a1544a605cc3a5c8a0c82308a6 not found: ID does not exist" containerID="b666dec95284eb62761efe0b8bac7d92ef66a5a1544a605cc3a5c8a0c82308a6" Dec 02 00:21:24 crc kubenswrapper[4856]: I1202 00:21:24.132070 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b666dec95284eb62761efe0b8bac7d92ef66a5a1544a605cc3a5c8a0c82308a6"} err="failed to get container status \"b666dec95284eb62761efe0b8bac7d92ef66a5a1544a605cc3a5c8a0c82308a6\": rpc error: code = NotFound desc = could not find container \"b666dec95284eb62761efe0b8bac7d92ef66a5a1544a605cc3a5c8a0c82308a6\": container with ID starting with b666dec95284eb62761efe0b8bac7d92ef66a5a1544a605cc3a5c8a0c82308a6 not found: ID does not exist" Dec 02 00:21:24 crc kubenswrapper[4856]: I1202 00:21:24.145369 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-42jvs"] Dec 02 00:21:24 crc kubenswrapper[4856]: I1202 00:21:24.149395 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/infrawatch-operators-42jvs"] Dec 02 00:21:24 crc kubenswrapper[4856]: I1202 00:21:24.294289 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/infrawatch-operators-pb4hp" Dec 02 00:21:24 crc kubenswrapper[4856]: I1202 00:21:24.294318 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/infrawatch-operators-pb4hp" Dec 02 00:21:24 crc kubenswrapper[4856]: I1202 00:21:24.320237 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/infrawatch-operators-pb4hp" Dec 02 00:21:25 crc kubenswrapper[4856]: I1202 00:21:25.258830 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7eebda05-158b-4ed9-a391-fd2622cc6b75" path="/var/lib/kubelet/pods/7eebda05-158b-4ed9-a391-fd2622cc6b75/volumes" Dec 02 00:21:34 crc kubenswrapper[4856]: I1202 00:21:34.333031 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/infrawatch-operators-pb4hp" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.034648 4856 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j"] Dec 02 00:21:36 crc kubenswrapper[4856]: E1202 00:21:36.034955 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7eebda05-158b-4ed9-a391-fd2622cc6b75" containerName="registry-server" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.034975 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="7eebda05-158b-4ed9-a391-fd2622cc6b75" containerName="registry-server" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.035124 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="7eebda05-158b-4ed9-a391-fd2622cc6b75" containerName="registry-server" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.036325 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.053335 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j"] Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.147337 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80-util\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j\" (UID: \"cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80\") " pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.147478 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80-bundle\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j\" (UID: \"cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80\") " pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.147623 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6f5r8\" (UniqueName: \"kubernetes.io/projected/cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80-kube-api-access-6f5r8\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j\" (UID: \"cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80\") " pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.249373 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80-util\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j\" (UID: \"cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80\") " pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.249442 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80-bundle\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j\" (UID: \"cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80\") " pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.249530 4856 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6f5r8\" (UniqueName: \"kubernetes.io/projected/cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80-kube-api-access-6f5r8\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j\" (UID: \"cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80\") " pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.250186 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80-bundle\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j\" (UID: \"cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80\") " pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.250341 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80-util\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j\" (UID: \"cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80\") " pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.271619 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6f5r8\" (UniqueName: \"kubernetes.io/projected/cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80-kube-api-access-6f5r8\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j\" (UID: \"cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80\") " pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.360315 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.438471 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5"] Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.439787 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.442033 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.453282 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5"] Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.555192 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d110afca-827d-4691-9e3a-5804705da959-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5\" (UID: \"d110afca-827d-4691-9e3a-5804705da959\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.555291 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d110afca-827d-4691-9e3a-5804705da959-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5\" (UID: \"d110afca-827d-4691-9e3a-5804705da959\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.556654 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sz9kt\" (UniqueName: \"kubernetes.io/projected/d110afca-827d-4691-9e3a-5804705da959-kube-api-access-sz9kt\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5\" (UID: \"d110afca-827d-4691-9e3a-5804705da959\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.658124 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sz9kt\" (UniqueName: \"kubernetes.io/projected/d110afca-827d-4691-9e3a-5804705da959-kube-api-access-sz9kt\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5\" (UID: \"d110afca-827d-4691-9e3a-5804705da959\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.658457 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d110afca-827d-4691-9e3a-5804705da959-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5\" (UID: \"d110afca-827d-4691-9e3a-5804705da959\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.658538 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d110afca-827d-4691-9e3a-5804705da959-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5\" (UID: \"d110afca-827d-4691-9e3a-5804705da959\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.659288 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/d110afca-827d-4691-9e3a-5804705da959-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5\" (UID: \"d110afca-827d-4691-9e3a-5804705da959\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.659319 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d110afca-827d-4691-9e3a-5804705da959-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5\" (UID: \"d110afca-827d-4691-9e3a-5804705da959\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.678382 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sz9kt\" (UniqueName: \"kubernetes.io/projected/d110afca-827d-4691-9e3a-5804705da959-kube-api-access-sz9kt\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5\" (UID: \"d110afca-827d-4691-9e3a-5804705da959\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.767531 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5" Dec 02 00:21:36 crc kubenswrapper[4856]: I1202 00:21:36.801446 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j"] Dec 02 00:21:36 crc kubenswrapper[4856]: W1202 00:21:36.801666 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcd6195e5_e415_4aff_b4cd_d9e2bbb3dd80.slice/crio-bb1f6f48ebafe94cb860031e496316148a69eae749531db37adf874c53d399eb WatchSource:0}: Error finding container bb1f6f48ebafe94cb860031e496316148a69eae749531db37adf874c53d399eb: Status 404 returned error can't find the container with id bb1f6f48ebafe94cb860031e496316148a69eae749531db37adf874c53d399eb Dec 02 00:21:37 crc kubenswrapper[4856]: I1202 00:21:37.021227 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5"] Dec 02 00:21:37 crc kubenswrapper[4856]: W1202 00:21:37.025706 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd110afca_827d_4691_9e3a_5804705da959.slice/crio-da88f13a247147fdf15b1249e5f0578da986c02f36b3dfde560964f0ef17ed7e WatchSource:0}: Error finding container da88f13a247147fdf15b1249e5f0578da986c02f36b3dfde560964f0ef17ed7e: Status 404 returned error can't find the container with id da88f13a247147fdf15b1249e5f0578da986c02f36b3dfde560964f0ef17ed7e Dec 02 00:21:37 crc kubenswrapper[4856]: I1202 00:21:37.209096 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5" event={"ID":"d110afca-827d-4691-9e3a-5804705da959","Type":"ContainerStarted","Data":"cd0c7882f0bf4b45a9cb1fa534dca7fa45e272c5fcc3ce1af463ebaf5275a0e5"} Dec 02 00:21:37 crc kubenswrapper[4856]: I1202 00:21:37.209367 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5" 
event={"ID":"d110afca-827d-4691-9e3a-5804705da959","Type":"ContainerStarted","Data":"da88f13a247147fdf15b1249e5f0578da986c02f36b3dfde560964f0ef17ed7e"} Dec 02 00:21:37 crc kubenswrapper[4856]: I1202 00:21:37.210948 4856 generic.go:334] "Generic (PLEG): container finished" podID="cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80" containerID="fe917ecc33adf94f591741da7a6c7bde787fe7afc618ad544c9b0fafe33c167e" exitCode=0 Dec 02 00:21:37 crc kubenswrapper[4856]: I1202 00:21:37.210992 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j" event={"ID":"cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80","Type":"ContainerDied","Data":"fe917ecc33adf94f591741da7a6c7bde787fe7afc618ad544c9b0fafe33c167e"} Dec 02 00:21:37 crc kubenswrapper[4856]: I1202 00:21:37.211020 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j" event={"ID":"cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80","Type":"ContainerStarted","Data":"bb1f6f48ebafe94cb860031e496316148a69eae749531db37adf874c53d399eb"} Dec 02 00:21:37 crc kubenswrapper[4856]: I1202 00:21:37.408319 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn"] Dec 02 00:21:37 crc kubenswrapper[4856]: I1202 00:21:37.410359 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn" Dec 02 00:21:37 crc kubenswrapper[4856]: I1202 00:21:37.419721 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn"] Dec 02 00:21:37 crc kubenswrapper[4856]: I1202 00:21:37.572022 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/08213270-abc5-4c81-80ee-76d7962e5890-bundle\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn\" (UID: \"08213270-abc5-4c81-80ee-76d7962e5890\") " pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn" Dec 02 00:21:37 crc kubenswrapper[4856]: I1202 00:21:37.572081 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/08213270-abc5-4c81-80ee-76d7962e5890-util\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn\" (UID: \"08213270-abc5-4c81-80ee-76d7962e5890\") " pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn" Dec 02 00:21:37 crc kubenswrapper[4856]: I1202 00:21:37.572131 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqrbb\" (UniqueName: \"kubernetes.io/projected/08213270-abc5-4c81-80ee-76d7962e5890-kube-api-access-nqrbb\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn\" (UID: \"08213270-abc5-4c81-80ee-76d7962e5890\") " pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn" Dec 02 00:21:37 crc kubenswrapper[4856]: I1202 00:21:37.673030 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/08213270-abc5-4c81-80ee-76d7962e5890-bundle\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn\" (UID: 
\"08213270-abc5-4c81-80ee-76d7962e5890\") " pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn" Dec 02 00:21:37 crc kubenswrapper[4856]: I1202 00:21:37.673100 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/08213270-abc5-4c81-80ee-76d7962e5890-util\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn\" (UID: \"08213270-abc5-4c81-80ee-76d7962e5890\") " pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn" Dec 02 00:21:37 crc kubenswrapper[4856]: I1202 00:21:37.673155 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqrbb\" (UniqueName: \"kubernetes.io/projected/08213270-abc5-4c81-80ee-76d7962e5890-kube-api-access-nqrbb\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn\" (UID: \"08213270-abc5-4c81-80ee-76d7962e5890\") " pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn" Dec 02 00:21:37 crc kubenswrapper[4856]: I1202 00:21:37.673504 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/08213270-abc5-4c81-80ee-76d7962e5890-bundle\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn\" (UID: \"08213270-abc5-4c81-80ee-76d7962e5890\") " pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn" Dec 02 00:21:37 crc kubenswrapper[4856]: I1202 00:21:37.673552 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/08213270-abc5-4c81-80ee-76d7962e5890-util\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn\" (UID: \"08213270-abc5-4c81-80ee-76d7962e5890\") " pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn" Dec 02 00:21:37 crc kubenswrapper[4856]: I1202 00:21:37.690021 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqrbb\" (UniqueName: \"kubernetes.io/projected/08213270-abc5-4c81-80ee-76d7962e5890-kube-api-access-nqrbb\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn\" (UID: \"08213270-abc5-4c81-80ee-76d7962e5890\") " pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn" Dec 02 00:21:37 crc kubenswrapper[4856]: I1202 00:21:37.734794 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn" Dec 02 00:21:38 crc kubenswrapper[4856]: I1202 00:21:38.195027 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn"] Dec 02 00:21:38 crc kubenswrapper[4856]: W1202 00:21:38.208701 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod08213270_abc5_4c81_80ee_76d7962e5890.slice/crio-67178b3f07f9068a0856f52ff3974f028b8f5196084cf3eb43ca495af2de1468 WatchSource:0}: Error finding container 67178b3f07f9068a0856f52ff3974f028b8f5196084cf3eb43ca495af2de1468: Status 404 returned error can't find the container with id 67178b3f07f9068a0856f52ff3974f028b8f5196084cf3eb43ca495af2de1468 Dec 02 00:21:38 crc kubenswrapper[4856]: I1202 00:21:38.224427 4856 generic.go:334] "Generic (PLEG): container finished" podID="d110afca-827d-4691-9e3a-5804705da959" containerID="cd0c7882f0bf4b45a9cb1fa534dca7fa45e272c5fcc3ce1af463ebaf5275a0e5" exitCode=0 Dec 02 00:21:38 crc kubenswrapper[4856]: I1202 00:21:38.224528 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5" event={"ID":"d110afca-827d-4691-9e3a-5804705da959","Type":"ContainerDied","Data":"cd0c7882f0bf4b45a9cb1fa534dca7fa45e272c5fcc3ce1af463ebaf5275a0e5"} Dec 02 00:21:38 crc kubenswrapper[4856]: I1202 00:21:38.226442 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn" event={"ID":"08213270-abc5-4c81-80ee-76d7962e5890","Type":"ContainerStarted","Data":"67178b3f07f9068a0856f52ff3974f028b8f5196084cf3eb43ca495af2de1468"} Dec 02 00:21:39 crc kubenswrapper[4856]: I1202 00:21:39.232006 4856 generic.go:334] "Generic (PLEG): container finished" podID="08213270-abc5-4c81-80ee-76d7962e5890" containerID="723b04f93384d4c54e009641a2fc4c6aa67fed13d9f57fd0467226a3503a8912" exitCode=0 Dec 02 00:21:39 crc kubenswrapper[4856]: I1202 00:21:39.232113 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn" event={"ID":"08213270-abc5-4c81-80ee-76d7962e5890","Type":"ContainerDied","Data":"723b04f93384d4c54e009641a2fc4c6aa67fed13d9f57fd0467226a3503a8912"} Dec 02 00:21:39 crc kubenswrapper[4856]: I1202 00:21:39.239716 4856 generic.go:334] "Generic (PLEG): container finished" podID="cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80" containerID="9fc4a64e3019898f73bcb62d4b92c323702c64e57f9974a2261dcb6087ebd6e0" exitCode=0 Dec 02 00:21:39 crc kubenswrapper[4856]: I1202 00:21:39.239753 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j" event={"ID":"cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80","Type":"ContainerDied","Data":"9fc4a64e3019898f73bcb62d4b92c323702c64e57f9974a2261dcb6087ebd6e0"} Dec 02 00:21:40 crc kubenswrapper[4856]: I1202 00:21:40.247675 4856 generic.go:334] "Generic (PLEG): container finished" podID="cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80" containerID="ba332baf26197489510adc91a2a8c6e948a36b4f66e81c578266082e2e0a84c1" exitCode=0 Dec 02 00:21:40 crc kubenswrapper[4856]: I1202 00:21:40.247799 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j" 
event={"ID":"cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80","Type":"ContainerDied","Data":"ba332baf26197489510adc91a2a8c6e948a36b4f66e81c578266082e2e0a84c1"} Dec 02 00:21:40 crc kubenswrapper[4856]: I1202 00:21:40.249420 4856 generic.go:334] "Generic (PLEG): container finished" podID="08213270-abc5-4c81-80ee-76d7962e5890" containerID="62f60dd25460d692247cca4602a9fd9003a436fcf912bbd81618255a6b62b655" exitCode=0 Dec 02 00:21:40 crc kubenswrapper[4856]: I1202 00:21:40.249497 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn" event={"ID":"08213270-abc5-4c81-80ee-76d7962e5890","Type":"ContainerDied","Data":"62f60dd25460d692247cca4602a9fd9003a436fcf912bbd81618255a6b62b655"} Dec 02 00:21:40 crc kubenswrapper[4856]: I1202 00:21:40.251513 4856 generic.go:334] "Generic (PLEG): container finished" podID="d110afca-827d-4691-9e3a-5804705da959" containerID="24251c862d7f8fd499038d4b1912098be3552261cf5b4cce37745c180567b3d7" exitCode=0 Dec 02 00:21:40 crc kubenswrapper[4856]: I1202 00:21:40.251545 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5" event={"ID":"d110afca-827d-4691-9e3a-5804705da959","Type":"ContainerDied","Data":"24251c862d7f8fd499038d4b1912098be3552261cf5b4cce37745c180567b3d7"} Dec 02 00:21:41 crc kubenswrapper[4856]: I1202 00:21:41.265966 4856 generic.go:334] "Generic (PLEG): container finished" podID="08213270-abc5-4c81-80ee-76d7962e5890" containerID="caad8d041a5c40f55592b04a568b125040d4158f512bb6030289c9cb5b2845f6" exitCode=0 Dec 02 00:21:41 crc kubenswrapper[4856]: I1202 00:21:41.266152 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn" event={"ID":"08213270-abc5-4c81-80ee-76d7962e5890","Type":"ContainerDied","Data":"caad8d041a5c40f55592b04a568b125040d4158f512bb6030289c9cb5b2845f6"} Dec 02 00:21:41 crc kubenswrapper[4856]: I1202 00:21:41.267615 4856 generic.go:334] "Generic (PLEG): container finished" podID="d110afca-827d-4691-9e3a-5804705da959" containerID="ccbe75dc26c31dd40e254799db75d87ec0232a123a1e31d5359f0ccf3c99e874" exitCode=0 Dec 02 00:21:41 crc kubenswrapper[4856]: I1202 00:21:41.267708 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5" event={"ID":"d110afca-827d-4691-9e3a-5804705da959","Type":"ContainerDied","Data":"ccbe75dc26c31dd40e254799db75d87ec0232a123a1e31d5359f0ccf3c99e874"} Dec 02 00:21:41 crc kubenswrapper[4856]: I1202 00:21:41.691439 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j" Dec 02 00:21:41 crc kubenswrapper[4856]: I1202 00:21:41.827704 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80-bundle\") pod \"cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80\" (UID: \"cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80\") " Dec 02 00:21:41 crc kubenswrapper[4856]: I1202 00:21:41.828056 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80-util\") pod \"cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80\" (UID: \"cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80\") " Dec 02 00:21:41 crc kubenswrapper[4856]: I1202 00:21:41.828085 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6f5r8\" (UniqueName: \"kubernetes.io/projected/cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80-kube-api-access-6f5r8\") pod \"cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80\" (UID: \"cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80\") " Dec 02 00:21:41 crc kubenswrapper[4856]: I1202 00:21:41.828206 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80-bundle" (OuterVolumeSpecName: "bundle") pod "cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80" (UID: "cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:21:41 crc kubenswrapper[4856]: I1202 00:21:41.828539 4856 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 00:21:41 crc kubenswrapper[4856]: I1202 00:21:41.833171 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80-kube-api-access-6f5r8" (OuterVolumeSpecName: "kube-api-access-6f5r8") pod "cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80" (UID: "cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80"). InnerVolumeSpecName "kube-api-access-6f5r8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:21:41 crc kubenswrapper[4856]: I1202 00:21:41.842637 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80-util" (OuterVolumeSpecName: "util") pod "cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80" (UID: "cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:21:41 crc kubenswrapper[4856]: I1202 00:21:41.930189 4856 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80-util\") on node \"crc\" DevicePath \"\"" Dec 02 00:21:41 crc kubenswrapper[4856]: I1202 00:21:41.930227 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6f5r8\" (UniqueName: \"kubernetes.io/projected/cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80-kube-api-access-6f5r8\") on node \"crc\" DevicePath \"\"" Dec 02 00:21:42 crc kubenswrapper[4856]: I1202 00:21:42.278117 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j" event={"ID":"cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80","Type":"ContainerDied","Data":"bb1f6f48ebafe94cb860031e496316148a69eae749531db37adf874c53d399eb"} Dec 02 00:21:42 crc kubenswrapper[4856]: I1202 00:21:42.278182 4856 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bb1f6f48ebafe94cb860031e496316148a69eae749531db37adf874c53d399eb" Dec 02 00:21:42 crc kubenswrapper[4856]: I1202 00:21:42.278195 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebw599j" Dec 02 00:21:42 crc kubenswrapper[4856]: I1202 00:21:42.613208 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn" Dec 02 00:21:42 crc kubenswrapper[4856]: I1202 00:21:42.618927 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5" Dec 02 00:21:42 crc kubenswrapper[4856]: I1202 00:21:42.740701 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/08213270-abc5-4c81-80ee-76d7962e5890-util\") pod \"08213270-abc5-4c81-80ee-76d7962e5890\" (UID: \"08213270-abc5-4c81-80ee-76d7962e5890\") " Dec 02 00:21:42 crc kubenswrapper[4856]: I1202 00:21:42.740781 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d110afca-827d-4691-9e3a-5804705da959-bundle\") pod \"d110afca-827d-4691-9e3a-5804705da959\" (UID: \"d110afca-827d-4691-9e3a-5804705da959\") " Dec 02 00:21:42 crc kubenswrapper[4856]: I1202 00:21:42.740828 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nqrbb\" (UniqueName: \"kubernetes.io/projected/08213270-abc5-4c81-80ee-76d7962e5890-kube-api-access-nqrbb\") pod \"08213270-abc5-4c81-80ee-76d7962e5890\" (UID: \"08213270-abc5-4c81-80ee-76d7962e5890\") " Dec 02 00:21:42 crc kubenswrapper[4856]: I1202 00:21:42.740852 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/08213270-abc5-4c81-80ee-76d7962e5890-bundle\") pod \"08213270-abc5-4c81-80ee-76d7962e5890\" (UID: \"08213270-abc5-4c81-80ee-76d7962e5890\") " Dec 02 00:21:42 crc kubenswrapper[4856]: I1202 00:21:42.740885 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sz9kt\" (UniqueName: \"kubernetes.io/projected/d110afca-827d-4691-9e3a-5804705da959-kube-api-access-sz9kt\") pod 
\"d110afca-827d-4691-9e3a-5804705da959\" (UID: \"d110afca-827d-4691-9e3a-5804705da959\") " Dec 02 00:21:42 crc kubenswrapper[4856]: I1202 00:21:42.740961 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d110afca-827d-4691-9e3a-5804705da959-util\") pod \"d110afca-827d-4691-9e3a-5804705da959\" (UID: \"d110afca-827d-4691-9e3a-5804705da959\") " Dec 02 00:21:42 crc kubenswrapper[4856]: I1202 00:21:42.741740 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d110afca-827d-4691-9e3a-5804705da959-bundle" (OuterVolumeSpecName: "bundle") pod "d110afca-827d-4691-9e3a-5804705da959" (UID: "d110afca-827d-4691-9e3a-5804705da959"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:21:42 crc kubenswrapper[4856]: I1202 00:21:42.742471 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08213270-abc5-4c81-80ee-76d7962e5890-bundle" (OuterVolumeSpecName: "bundle") pod "08213270-abc5-4c81-80ee-76d7962e5890" (UID: "08213270-abc5-4c81-80ee-76d7962e5890"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:21:42 crc kubenswrapper[4856]: I1202 00:21:42.744828 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d110afca-827d-4691-9e3a-5804705da959-kube-api-access-sz9kt" (OuterVolumeSpecName: "kube-api-access-sz9kt") pod "d110afca-827d-4691-9e3a-5804705da959" (UID: "d110afca-827d-4691-9e3a-5804705da959"). InnerVolumeSpecName "kube-api-access-sz9kt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:21:42 crc kubenswrapper[4856]: I1202 00:21:42.746733 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08213270-abc5-4c81-80ee-76d7962e5890-kube-api-access-nqrbb" (OuterVolumeSpecName: "kube-api-access-nqrbb") pod "08213270-abc5-4c81-80ee-76d7962e5890" (UID: "08213270-abc5-4c81-80ee-76d7962e5890"). InnerVolumeSpecName "kube-api-access-nqrbb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:21:42 crc kubenswrapper[4856]: I1202 00:21:42.767506 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08213270-abc5-4c81-80ee-76d7962e5890-util" (OuterVolumeSpecName: "util") pod "08213270-abc5-4c81-80ee-76d7962e5890" (UID: "08213270-abc5-4c81-80ee-76d7962e5890"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:21:42 crc kubenswrapper[4856]: I1202 00:21:42.804117 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d110afca-827d-4691-9e3a-5804705da959-util" (OuterVolumeSpecName: "util") pod "d110afca-827d-4691-9e3a-5804705da959" (UID: "d110afca-827d-4691-9e3a-5804705da959"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:21:42 crc kubenswrapper[4856]: I1202 00:21:42.841994 4856 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d110afca-827d-4691-9e3a-5804705da959-util\") on node \"crc\" DevicePath \"\"" Dec 02 00:21:42 crc kubenswrapper[4856]: I1202 00:21:42.842030 4856 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/08213270-abc5-4c81-80ee-76d7962e5890-util\") on node \"crc\" DevicePath \"\"" Dec 02 00:21:42 crc kubenswrapper[4856]: I1202 00:21:42.842040 4856 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d110afca-827d-4691-9e3a-5804705da959-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 00:21:42 crc kubenswrapper[4856]: I1202 00:21:42.842050 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nqrbb\" (UniqueName: \"kubernetes.io/projected/08213270-abc5-4c81-80ee-76d7962e5890-kube-api-access-nqrbb\") on node \"crc\" DevicePath \"\"" Dec 02 00:21:42 crc kubenswrapper[4856]: I1202 00:21:42.842062 4856 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/08213270-abc5-4c81-80ee-76d7962e5890-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 00:21:42 crc kubenswrapper[4856]: I1202 00:21:42.842069 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sz9kt\" (UniqueName: \"kubernetes.io/projected/d110afca-827d-4691-9e3a-5804705da959-kube-api-access-sz9kt\") on node \"crc\" DevicePath \"\"" Dec 02 00:21:43 crc kubenswrapper[4856]: I1202 00:21:43.285917 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn" event={"ID":"08213270-abc5-4c81-80ee-76d7962e5890","Type":"ContainerDied","Data":"67178b3f07f9068a0856f52ff3974f028b8f5196084cf3eb43ca495af2de1468"} Dec 02 00:21:43 crc kubenswrapper[4856]: I1202 00:21:43.285957 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bq66cn" Dec 02 00:21:43 crc kubenswrapper[4856]: I1202 00:21:43.285970 4856 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="67178b3f07f9068a0856f52ff3974f028b8f5196084cf3eb43ca495af2de1468" Dec 02 00:21:43 crc kubenswrapper[4856]: I1202 00:21:43.289915 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5" event={"ID":"d110afca-827d-4691-9e3a-5804705da959","Type":"ContainerDied","Data":"da88f13a247147fdf15b1249e5f0578da986c02f36b3dfde560964f0ef17ed7e"} Dec 02 00:21:43 crc kubenswrapper[4856]: I1202 00:21:43.289950 4856 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="da88f13a247147fdf15b1249e5f0578da986c02f36b3dfde560964f0ef17ed7e" Dec 02 00:21:43 crc kubenswrapper[4856]: I1202 00:21:43.289969 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5" Dec 02 00:21:49 crc kubenswrapper[4856]: I1202 00:21:49.440298 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/smart-gateway-operator-6d757dbf4c-c925h"] Dec 02 00:21:49 crc kubenswrapper[4856]: E1202 00:21:49.441212 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d110afca-827d-4691-9e3a-5804705da959" containerName="pull" Dec 02 00:21:49 crc kubenswrapper[4856]: I1202 00:21:49.441232 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="d110afca-827d-4691-9e3a-5804705da959" containerName="pull" Dec 02 00:21:49 crc kubenswrapper[4856]: E1202 00:21:49.441249 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80" containerName="pull" Dec 02 00:21:49 crc kubenswrapper[4856]: I1202 00:21:49.441257 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80" containerName="pull" Dec 02 00:21:49 crc kubenswrapper[4856]: E1202 00:21:49.441278 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80" containerName="util" Dec 02 00:21:49 crc kubenswrapper[4856]: I1202 00:21:49.441288 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80" containerName="util" Dec 02 00:21:49 crc kubenswrapper[4856]: E1202 00:21:49.441302 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80" containerName="extract" Dec 02 00:21:49 crc kubenswrapper[4856]: I1202 00:21:49.441309 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80" containerName="extract" Dec 02 00:21:49 crc kubenswrapper[4856]: E1202 00:21:49.441324 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08213270-abc5-4c81-80ee-76d7962e5890" containerName="util" Dec 02 00:21:49 crc kubenswrapper[4856]: I1202 00:21:49.441331 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="08213270-abc5-4c81-80ee-76d7962e5890" containerName="util" Dec 02 00:21:49 crc kubenswrapper[4856]: E1202 00:21:49.441342 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d110afca-827d-4691-9e3a-5804705da959" containerName="util" Dec 02 00:21:49 crc kubenswrapper[4856]: I1202 00:21:49.441350 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="d110afca-827d-4691-9e3a-5804705da959" containerName="util" Dec 02 00:21:49 crc kubenswrapper[4856]: E1202 00:21:49.441362 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08213270-abc5-4c81-80ee-76d7962e5890" containerName="extract" Dec 02 00:21:49 crc kubenswrapper[4856]: I1202 00:21:49.441369 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="08213270-abc5-4c81-80ee-76d7962e5890" containerName="extract" Dec 02 00:21:49 crc kubenswrapper[4856]: E1202 00:21:49.441382 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d110afca-827d-4691-9e3a-5804705da959" containerName="extract" Dec 02 00:21:49 crc kubenswrapper[4856]: I1202 00:21:49.441389 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="d110afca-827d-4691-9e3a-5804705da959" containerName="extract" Dec 02 00:21:49 crc kubenswrapper[4856]: E1202 00:21:49.441398 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08213270-abc5-4c81-80ee-76d7962e5890" containerName="pull" Dec 02 00:21:49 crc kubenswrapper[4856]: 
I1202 00:21:49.441407 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="08213270-abc5-4c81-80ee-76d7962e5890" containerName="pull" Dec 02 00:21:49 crc kubenswrapper[4856]: I1202 00:21:49.441552 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="08213270-abc5-4c81-80ee-76d7962e5890" containerName="extract" Dec 02 00:21:49 crc kubenswrapper[4856]: I1202 00:21:49.441570 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd6195e5-e415-4aff-b4cd-d9e2bbb3dd80" containerName="extract" Dec 02 00:21:49 crc kubenswrapper[4856]: I1202 00:21:49.441649 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="d110afca-827d-4691-9e3a-5804705da959" containerName="extract" Dec 02 00:21:49 crc kubenswrapper[4856]: I1202 00:21:49.442304 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-6d757dbf4c-c925h" Dec 02 00:21:49 crc kubenswrapper[4856]: I1202 00:21:49.445305 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-operator-dockercfg-8tfvw" Dec 02 00:21:49 crc kubenswrapper[4856]: I1202 00:21:49.466000 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-6d757dbf4c-c925h"] Dec 02 00:21:49 crc kubenswrapper[4856]: I1202 00:21:49.552118 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/0ad34512-956b-4bf0-a54c-5d44f734a857-runner\") pod \"smart-gateway-operator-6d757dbf4c-c925h\" (UID: \"0ad34512-956b-4bf0-a54c-5d44f734a857\") " pod="service-telemetry/smart-gateway-operator-6d757dbf4c-c925h" Dec 02 00:21:49 crc kubenswrapper[4856]: I1202 00:21:49.552209 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ngsf5\" (UniqueName: \"kubernetes.io/projected/0ad34512-956b-4bf0-a54c-5d44f734a857-kube-api-access-ngsf5\") pod \"smart-gateway-operator-6d757dbf4c-c925h\" (UID: \"0ad34512-956b-4bf0-a54c-5d44f734a857\") " pod="service-telemetry/smart-gateway-operator-6d757dbf4c-c925h" Dec 02 00:21:49 crc kubenswrapper[4856]: I1202 00:21:49.653545 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ngsf5\" (UniqueName: \"kubernetes.io/projected/0ad34512-956b-4bf0-a54c-5d44f734a857-kube-api-access-ngsf5\") pod \"smart-gateway-operator-6d757dbf4c-c925h\" (UID: \"0ad34512-956b-4bf0-a54c-5d44f734a857\") " pod="service-telemetry/smart-gateway-operator-6d757dbf4c-c925h" Dec 02 00:21:49 crc kubenswrapper[4856]: I1202 00:21:49.653665 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/0ad34512-956b-4bf0-a54c-5d44f734a857-runner\") pod \"smart-gateway-operator-6d757dbf4c-c925h\" (UID: \"0ad34512-956b-4bf0-a54c-5d44f734a857\") " pod="service-telemetry/smart-gateway-operator-6d757dbf4c-c925h" Dec 02 00:21:49 crc kubenswrapper[4856]: I1202 00:21:49.654267 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/0ad34512-956b-4bf0-a54c-5d44f734a857-runner\") pod \"smart-gateway-operator-6d757dbf4c-c925h\" (UID: \"0ad34512-956b-4bf0-a54c-5d44f734a857\") " pod="service-telemetry/smart-gateway-operator-6d757dbf4c-c925h" Dec 02 00:21:49 crc kubenswrapper[4856]: I1202 00:21:49.673706 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-ngsf5\" (UniqueName: \"kubernetes.io/projected/0ad34512-956b-4bf0-a54c-5d44f734a857-kube-api-access-ngsf5\") pod \"smart-gateway-operator-6d757dbf4c-c925h\" (UID: \"0ad34512-956b-4bf0-a54c-5d44f734a857\") " pod="service-telemetry/smart-gateway-operator-6d757dbf4c-c925h" Dec 02 00:21:49 crc kubenswrapper[4856]: I1202 00:21:49.759394 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-6d757dbf4c-c925h" Dec 02 00:21:50 crc kubenswrapper[4856]: I1202 00:21:50.229243 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-6d757dbf4c-c925h"] Dec 02 00:21:50 crc kubenswrapper[4856]: W1202 00:21:50.233901 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0ad34512_956b_4bf0_a54c_5d44f734a857.slice/crio-abc0335ecfdebb1d624065d97cd6e8bfebdb6737b9da9cd7821679ed4df69f8c WatchSource:0}: Error finding container abc0335ecfdebb1d624065d97cd6e8bfebdb6737b9da9cd7821679ed4df69f8c: Status 404 returned error can't find the container with id abc0335ecfdebb1d624065d97cd6e8bfebdb6737b9da9cd7821679ed4df69f8c Dec 02 00:21:50 crc kubenswrapper[4856]: I1202 00:21:50.334843 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-6d757dbf4c-c925h" event={"ID":"0ad34512-956b-4bf0-a54c-5d44f734a857","Type":"ContainerStarted","Data":"abc0335ecfdebb1d624065d97cd6e8bfebdb6737b9da9cd7821679ed4df69f8c"} Dec 02 00:21:52 crc kubenswrapper[4856]: I1202 00:21:52.147835 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-v9wkz"] Dec 02 00:21:52 crc kubenswrapper[4856]: I1202 00:21:52.149294 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/interconnect-operator-5bb49f789d-v9wkz" Dec 02 00:21:52 crc kubenswrapper[4856]: I1202 00:21:52.155231 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"interconnect-operator-dockercfg-jgnfb" Dec 02 00:21:52 crc kubenswrapper[4856]: I1202 00:21:52.177408 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-v9wkz"] Dec 02 00:21:52 crc kubenswrapper[4856]: I1202 00:21:52.310870 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72kvj\" (UniqueName: \"kubernetes.io/projected/906e770b-3922-4404-96bd-dae951a9245e-kube-api-access-72kvj\") pod \"interconnect-operator-5bb49f789d-v9wkz\" (UID: \"906e770b-3922-4404-96bd-dae951a9245e\") " pod="service-telemetry/interconnect-operator-5bb49f789d-v9wkz" Dec 02 00:21:52 crc kubenswrapper[4856]: I1202 00:21:52.412358 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72kvj\" (UniqueName: \"kubernetes.io/projected/906e770b-3922-4404-96bd-dae951a9245e-kube-api-access-72kvj\") pod \"interconnect-operator-5bb49f789d-v9wkz\" (UID: \"906e770b-3922-4404-96bd-dae951a9245e\") " pod="service-telemetry/interconnect-operator-5bb49f789d-v9wkz" Dec 02 00:21:52 crc kubenswrapper[4856]: I1202 00:21:52.439652 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72kvj\" (UniqueName: \"kubernetes.io/projected/906e770b-3922-4404-96bd-dae951a9245e-kube-api-access-72kvj\") pod \"interconnect-operator-5bb49f789d-v9wkz\" (UID: \"906e770b-3922-4404-96bd-dae951a9245e\") " pod="service-telemetry/interconnect-operator-5bb49f789d-v9wkz" Dec 02 00:21:52 crc kubenswrapper[4856]: I1202 00:21:52.472087 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/interconnect-operator-5bb49f789d-v9wkz" Dec 02 00:21:52 crc kubenswrapper[4856]: I1202 00:21:52.932934 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-v9wkz"] Dec 02 00:21:53 crc kubenswrapper[4856]: I1202 00:21:53.275339 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-dd9844f47-gmq8n"] Dec 02 00:21:53 crc kubenswrapper[4856]: I1202 00:21:53.276205 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-dd9844f47-gmq8n" Dec 02 00:21:53 crc kubenswrapper[4856]: I1202 00:21:53.283577 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"service-telemetry-operator-dockercfg-9vs7x" Dec 02 00:21:53 crc kubenswrapper[4856]: I1202 00:21:53.304618 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-dd9844f47-gmq8n"] Dec 02 00:21:53 crc kubenswrapper[4856]: I1202 00:21:53.343857 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/37afc025-524b-44e8-a79b-1390846f28bb-runner\") pod \"service-telemetry-operator-dd9844f47-gmq8n\" (UID: \"37afc025-524b-44e8-a79b-1390846f28bb\") " pod="service-telemetry/service-telemetry-operator-dd9844f47-gmq8n" Dec 02 00:21:53 crc kubenswrapper[4856]: I1202 00:21:53.344013 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7trdm\" (UniqueName: \"kubernetes.io/projected/37afc025-524b-44e8-a79b-1390846f28bb-kube-api-access-7trdm\") pod \"service-telemetry-operator-dd9844f47-gmq8n\" (UID: \"37afc025-524b-44e8-a79b-1390846f28bb\") " pod="service-telemetry/service-telemetry-operator-dd9844f47-gmq8n" Dec 02 00:21:53 crc kubenswrapper[4856]: I1202 00:21:53.387386 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/interconnect-operator-5bb49f789d-v9wkz" event={"ID":"906e770b-3922-4404-96bd-dae951a9245e","Type":"ContainerStarted","Data":"2af1930a2e310279be8773387a4dd6cbbc21d27cff2161625f8550ac10dc1bda"} Dec 02 00:21:53 crc kubenswrapper[4856]: I1202 00:21:53.444810 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/37afc025-524b-44e8-a79b-1390846f28bb-runner\") pod \"service-telemetry-operator-dd9844f47-gmq8n\" (UID: \"37afc025-524b-44e8-a79b-1390846f28bb\") " pod="service-telemetry/service-telemetry-operator-dd9844f47-gmq8n" Dec 02 00:21:53 crc kubenswrapper[4856]: I1202 00:21:53.444905 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7trdm\" (UniqueName: \"kubernetes.io/projected/37afc025-524b-44e8-a79b-1390846f28bb-kube-api-access-7trdm\") pod \"service-telemetry-operator-dd9844f47-gmq8n\" (UID: \"37afc025-524b-44e8-a79b-1390846f28bb\") " pod="service-telemetry/service-telemetry-operator-dd9844f47-gmq8n" Dec 02 00:21:53 crc kubenswrapper[4856]: I1202 00:21:53.445575 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/37afc025-524b-44e8-a79b-1390846f28bb-runner\") pod \"service-telemetry-operator-dd9844f47-gmq8n\" (UID: \"37afc025-524b-44e8-a79b-1390846f28bb\") " pod="service-telemetry/service-telemetry-operator-dd9844f47-gmq8n" Dec 02 00:21:53 crc kubenswrapper[4856]: I1202 00:21:53.473830 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7trdm\" (UniqueName: \"kubernetes.io/projected/37afc025-524b-44e8-a79b-1390846f28bb-kube-api-access-7trdm\") pod \"service-telemetry-operator-dd9844f47-gmq8n\" (UID: \"37afc025-524b-44e8-a79b-1390846f28bb\") " pod="service-telemetry/service-telemetry-operator-dd9844f47-gmq8n" Dec 02 00:21:53 crc kubenswrapper[4856]: I1202 00:21:53.603342 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-dd9844f47-gmq8n" Dec 02 00:21:54 crc kubenswrapper[4856]: I1202 00:21:54.045227 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-dd9844f47-gmq8n"] Dec 02 00:21:54 crc kubenswrapper[4856]: I1202 00:21:54.410561 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-dd9844f47-gmq8n" event={"ID":"37afc025-524b-44e8-a79b-1390846f28bb","Type":"ContainerStarted","Data":"3d6ef8b3a579281cd136c28f4fef80d3adee48cc1d835e814feef8ce2e7b18b3"} Dec 02 00:22:05 crc kubenswrapper[4856]: I1202 00:22:05.061816 4856 patch_prober.go:28] interesting pod/machine-config-daemon-455ww container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 00:22:05 crc kubenswrapper[4856]: I1202 00:22:05.062435 4856 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 00:22:15 crc kubenswrapper[4856]: E1202 00:22:15.177411 4856 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/infrawatch/smart-gateway-operator:latest" Dec 02 00:22:15 crc kubenswrapper[4856]: E1202 00:22:15.178384 4856 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/infrawatch/smart-gateway-operator:latest,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:WATCH_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.annotations['olm.targetNamespaces'],},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:OPERATOR_NAME,Value:smart-gateway-operator,ValueFrom:nil,},EnvVar{Name:ANSIBLE_GATHERING,Value:explicit,ValueFrom:nil,},EnvVar{Name:ANSIBLE_VERBOSITY_SMARTGATEWAY_SMARTGATEWAY_INFRA_WATCH,Value:4,ValueFrom:nil,},EnvVar{Name:ANSIBLE_DEBUG_LOGS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CORE_SMARTGATEWAY_IMAGE,Value:quay.io/infrawatch/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BRIDGE_SMARTGATEWAY_IMAGE,Value:quay.io/infrawatch/sg-bridge:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OAUTH_PROXY_IMAGE,Value:quay.io/openshift/origin-oauth-proxy:latest,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:smart-gateway-operator.v5.0.1764629124,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:runner,ReadOnly:false,MountPath:/tmp/ansible-operator/runner,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ngsf5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImageP
ullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod smart-gateway-operator-6d757dbf4c-c925h_service-telemetry(0ad34512-956b-4bf0-a54c-5d44f734a857): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 00:22:15 crc kubenswrapper[4856]: E1202 00:22:15.181350 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/smart-gateway-operator-6d757dbf4c-c925h" podUID="0ad34512-956b-4bf0-a54c-5d44f734a857" Dec 02 00:22:15 crc kubenswrapper[4856]: I1202 00:22:15.553452 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/interconnect-operator-5bb49f789d-v9wkz" event={"ID":"906e770b-3922-4404-96bd-dae951a9245e","Type":"ContainerStarted","Data":"8d43ec6f84c31aad02e0256b449f4bc715d3d085ca6babab2eb7ca09a83e90cd"} Dec 02 00:22:15 crc kubenswrapper[4856]: E1202 00:22:15.553935 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/infrawatch/smart-gateway-operator:latest\\\"\"" pod="service-telemetry/smart-gateway-operator-6d757dbf4c-c925h" podUID="0ad34512-956b-4bf0-a54c-5d44f734a857" Dec 02 00:22:15 crc kubenswrapper[4856]: I1202 00:22:15.569563 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/interconnect-operator-5bb49f789d-v9wkz" podStartSLOduration=5.252626589 podStartE2EDuration="23.569547663s" podCreationTimestamp="2025-12-02 00:21:52 +0000 UTC" firstStartedPulling="2025-12-02 00:21:52.959121224 +0000 UTC m=+939.985489228" lastFinishedPulling="2025-12-02 00:22:11.276042298 +0000 UTC m=+958.302410302" observedRunningTime="2025-12-02 00:22:15.568422545 +0000 UTC m=+962.594790549" watchObservedRunningTime="2025-12-02 00:22:15.569547663 +0000 UTC m=+962.595915657" Dec 02 00:22:20 crc kubenswrapper[4856]: I1202 00:22:20.585105 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-dd9844f47-gmq8n" event={"ID":"37afc025-524b-44e8-a79b-1390846f28bb","Type":"ContainerStarted","Data":"0161c95ebc4bb1bdb6ec355f11ffd0aaf2ad29a61a4305b684ec458e680e40c2"} Dec 02 00:22:20 crc kubenswrapper[4856]: I1202 00:22:20.603737 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-operator-dd9844f47-gmq8n" podStartSLOduration=1.849433799 podStartE2EDuration="27.603708023s" podCreationTimestamp="2025-12-02 00:21:53 +0000 UTC" firstStartedPulling="2025-12-02 00:21:54.063950696 +0000 UTC m=+941.090318700" lastFinishedPulling="2025-12-02 00:22:19.81822492 +0000 UTC m=+966.844592924" observedRunningTime="2025-12-02 00:22:20.601744915 +0000 UTC m=+967.628112919" watchObservedRunningTime="2025-12-02 00:22:20.603708023 +0000 UTC m=+967.630076027" Dec 02 00:22:31 crc kubenswrapper[4856]: I1202 00:22:31.666892 4856 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-6d757dbf4c-c925h" event={"ID":"0ad34512-956b-4bf0-a54c-5d44f734a857","Type":"ContainerStarted","Data":"a7a8f583adb15b6532fb82a829804edeaa4ca548496f14bc5ecd83b06c57629e"} Dec 02 00:22:31 crc kubenswrapper[4856]: I1202 00:22:31.683714 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/smart-gateway-operator-6d757dbf4c-c925h" podStartSLOduration=2.021530593 podStartE2EDuration="42.683698502s" podCreationTimestamp="2025-12-02 00:21:49 +0000 UTC" firstStartedPulling="2025-12-02 00:21:50.235154169 +0000 UTC m=+937.261522163" lastFinishedPulling="2025-12-02 00:22:30.897322038 +0000 UTC m=+977.923690072" observedRunningTime="2025-12-02 00:22:31.681474337 +0000 UTC m=+978.707842341" watchObservedRunningTime="2025-12-02 00:22:31.683698502 +0000 UTC m=+978.710066506" Dec 02 00:22:35 crc kubenswrapper[4856]: I1202 00:22:35.062286 4856 patch_prober.go:28] interesting pod/machine-config-daemon-455ww container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 00:22:35 crc kubenswrapper[4856]: I1202 00:22:35.063202 4856 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.663059 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-hmpjc"] Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.664510 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.667681 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-users" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.668015 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-interconnect-sasl-config" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.668715 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-dockercfg-m8bqf" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.668956 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-openstack-ca" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.669176 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-openstack-credentials" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.669362 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-inter-router-credentials" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.669517 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-inter-router-ca" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.686738 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-hmpjc"] Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.775521 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-hmpjc\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.775559 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-hmpjc\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.775660 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcfqk\" (UniqueName: \"kubernetes.io/projected/1a61e765-280e-415c-96a5-797fc8a3d133-kube-api-access-lcfqk\") pod \"default-interconnect-68864d46cb-hmpjc\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.775798 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-hmpjc\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.775824 4856 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-sasl-users\") pod \"default-interconnect-68864d46cb-hmpjc\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.775885 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-hmpjc\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.776008 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/1a61e765-280e-415c-96a5-797fc8a3d133-sasl-config\") pod \"default-interconnect-68864d46cb-hmpjc\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.877189 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/1a61e765-280e-415c-96a5-797fc8a3d133-sasl-config\") pod \"default-interconnect-68864d46cb-hmpjc\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.877256 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-hmpjc\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.877278 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-hmpjc\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.877297 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcfqk\" (UniqueName: \"kubernetes.io/projected/1a61e765-280e-415c-96a5-797fc8a3d133-kube-api-access-lcfqk\") pod \"default-interconnect-68864d46cb-hmpjc\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.877354 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-hmpjc\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.877377 
4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-sasl-users\") pod \"default-interconnect-68864d46cb-hmpjc\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.877420 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-hmpjc\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.878241 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/1a61e765-280e-415c-96a5-797fc8a3d133-sasl-config\") pod \"default-interconnect-68864d46cb-hmpjc\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.889339 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-sasl-users\") pod \"default-interconnect-68864d46cb-hmpjc\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.889365 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-hmpjc\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.889371 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-hmpjc\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.890375 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-hmpjc\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.890375 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-hmpjc\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.899188 4856 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-lcfqk\" (UniqueName: \"kubernetes.io/projected/1a61e765-280e-415c-96a5-797fc8a3d133-kube-api-access-lcfqk\") pod \"default-interconnect-68864d46cb-hmpjc\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" Dec 02 00:22:41 crc kubenswrapper[4856]: I1202 00:22:41.985725 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" Dec 02 00:22:42 crc kubenswrapper[4856]: I1202 00:22:42.401112 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-hmpjc"] Dec 02 00:22:42 crc kubenswrapper[4856]: I1202 00:22:42.757069 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" event={"ID":"1a61e765-280e-415c-96a5-797fc8a3d133","Type":"ContainerStarted","Data":"38f199763791e6cd9af5a7ef4b6500b254ef5c2520ca1eaec0f46f3c8ba0b79b"} Dec 02 00:22:47 crc kubenswrapper[4856]: I1202 00:22:47.793898 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" event={"ID":"1a61e765-280e-415c-96a5-797fc8a3d133","Type":"ContainerStarted","Data":"c40c3c7978c9efbb6efb68f09fd6563ab73b3fe41db7d4ad08511933933db714"} Dec 02 00:22:47 crc kubenswrapper[4856]: I1202 00:22:47.821116 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" podStartSLOduration=2.390374655 podStartE2EDuration="6.82109572s" podCreationTimestamp="2025-12-02 00:22:41 +0000 UTC" firstStartedPulling="2025-12-02 00:22:42.411698123 +0000 UTC m=+989.438066147" lastFinishedPulling="2025-12-02 00:22:46.842419208 +0000 UTC m=+993.868787212" observedRunningTime="2025-12-02 00:22:47.816732113 +0000 UTC m=+994.843100117" watchObservedRunningTime="2025-12-02 00:22:47.82109572 +0000 UTC m=+994.847463734" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.315691 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/prometheus-default-0"] Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.317407 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.321558 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"serving-certs-ca-bundle" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.322021 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-prometheus-proxy-tls" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.322061 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.322062 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default-web-config" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.322137 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-stf-dockercfg-8v7mg" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.322264 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default-tls-assets-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.322333 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-default-rulefiles-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.322418 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-session-secret" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.338801 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-default-0"] Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.428331 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/dc28fa42-75c6-4d34-8d03-6e759368f5e5-config-out\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.428373 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gglz8\" (UniqueName: \"kubernetes.io/projected/dc28fa42-75c6-4d34-8d03-6e759368f5e5-kube-api-access-gglz8\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.428409 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dc28fa42-75c6-4d34-8d03-6e759368f5e5-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.428428 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/dc28fa42-75c6-4d34-8d03-6e759368f5e5-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.428446 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/dc28fa42-75c6-4d34-8d03-6e759368f5e5-config\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.428627 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/dc28fa42-75c6-4d34-8d03-6e759368f5e5-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.428680 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/dc28fa42-75c6-4d34-8d03-6e759368f5e5-web-config\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.428841 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-default-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/dc28fa42-75c6-4d34-8d03-6e759368f5e5-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.428956 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/dc28fa42-75c6-4d34-8d03-6e759368f5e5-tls-assets\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.429115 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-edfe46f7-ff1f-4ae2-9ec7-7d7d45609d69\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-edfe46f7-ff1f-4ae2-9ec7-7d7d45609d69\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.530813 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-default-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/dc28fa42-75c6-4d34-8d03-6e759368f5e5-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.530861 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/dc28fa42-75c6-4d34-8d03-6e759368f5e5-tls-assets\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.530904 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-edfe46f7-ff1f-4ae2-9ec7-7d7d45609d69\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-edfe46f7-ff1f-4ae2-9ec7-7d7d45609d69\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.530946 4856 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/dc28fa42-75c6-4d34-8d03-6e759368f5e5-config-out\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.530974 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gglz8\" (UniqueName: \"kubernetes.io/projected/dc28fa42-75c6-4d34-8d03-6e759368f5e5-kube-api-access-gglz8\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.531009 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dc28fa42-75c6-4d34-8d03-6e759368f5e5-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.531030 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/dc28fa42-75c6-4d34-8d03-6e759368f5e5-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.531051 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dc28fa42-75c6-4d34-8d03-6e759368f5e5-config\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.531093 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/dc28fa42-75c6-4d34-8d03-6e759368f5e5-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.531118 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/dc28fa42-75c6-4d34-8d03-6e759368f5e5-web-config\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.531718 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-default-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/dc28fa42-75c6-4d34-8d03-6e759368f5e5-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: E1202 00:22:52.531738 4856 secret.go:188] Couldn't get secret service-telemetry/default-prometheus-proxy-tls: secret "default-prometheus-proxy-tls" not found Dec 02 00:22:52 crc kubenswrapper[4856]: E1202 00:22:52.532171 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dc28fa42-75c6-4d34-8d03-6e759368f5e5-secret-default-prometheus-proxy-tls podName:dc28fa42-75c6-4d34-8d03-6e759368f5e5 
nodeName:}" failed. No retries permitted until 2025-12-02 00:22:53.032149559 +0000 UTC m=+1000.058517583 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "secret-default-prometheus-proxy-tls" (UniqueName: "kubernetes.io/secret/dc28fa42-75c6-4d34-8d03-6e759368f5e5-secret-default-prometheus-proxy-tls") pod "prometheus-default-0" (UID: "dc28fa42-75c6-4d34-8d03-6e759368f5e5") : secret "default-prometheus-proxy-tls" not found Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.532520 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dc28fa42-75c6-4d34-8d03-6e759368f5e5-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.536946 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/dc28fa42-75c6-4d34-8d03-6e759368f5e5-config-out\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.537524 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/dc28fa42-75c6-4d34-8d03-6e759368f5e5-web-config\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.537753 4856 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.537813 4856 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-edfe46f7-ff1f-4ae2-9ec7-7d7d45609d69\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-edfe46f7-ff1f-4ae2-9ec7-7d7d45609d69\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/9beb422235cbe3c1332fec24729613d35d7fdae0766fedce8f43f2e0368fcbaf/globalmount\"" pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.538243 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/dc28fa42-75c6-4d34-8d03-6e759368f5e5-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.538604 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/dc28fa42-75c6-4d34-8d03-6e759368f5e5-tls-assets\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.545548 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/dc28fa42-75c6-4d34-8d03-6e759368f5e5-config\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.561551 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-edfe46f7-ff1f-4ae2-9ec7-7d7d45609d69\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-edfe46f7-ff1f-4ae2-9ec7-7d7d45609d69\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:52 crc kubenswrapper[4856]: I1202 00:22:52.562363 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gglz8\" (UniqueName: \"kubernetes.io/projected/dc28fa42-75c6-4d34-8d03-6e759368f5e5-kube-api-access-gglz8\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:53 crc kubenswrapper[4856]: I1202 00:22:53.037028 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/dc28fa42-75c6-4d34-8d03-6e759368f5e5-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:53 crc kubenswrapper[4856]: E1202 00:22:53.037231 4856 secret.go:188] Couldn't get secret service-telemetry/default-prometheus-proxy-tls: secret "default-prometheus-proxy-tls" not found Dec 02 00:22:53 crc kubenswrapper[4856]: E1202 00:22:53.037313 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dc28fa42-75c6-4d34-8d03-6e759368f5e5-secret-default-prometheus-proxy-tls podName:dc28fa42-75c6-4d34-8d03-6e759368f5e5 nodeName:}" failed. No retries permitted until 2025-12-02 00:22:54.037290267 +0000 UTC m=+1001.063658271 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "secret-default-prometheus-proxy-tls" (UniqueName: "kubernetes.io/secret/dc28fa42-75c6-4d34-8d03-6e759368f5e5-secret-default-prometheus-proxy-tls") pod "prometheus-default-0" (UID: "dc28fa42-75c6-4d34-8d03-6e759368f5e5") : secret "default-prometheus-proxy-tls" not found Dec 02 00:22:54 crc kubenswrapper[4856]: I1202 00:22:54.057824 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/dc28fa42-75c6-4d34-8d03-6e759368f5e5-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:54 crc kubenswrapper[4856]: I1202 00:22:54.080216 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/dc28fa42-75c6-4d34-8d03-6e759368f5e5-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"dc28fa42-75c6-4d34-8d03-6e759368f5e5\") " pod="service-telemetry/prometheus-default-0" Dec 02 00:22:54 crc kubenswrapper[4856]: I1202 00:22:54.133451 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/prometheus-default-0" Dec 02 00:22:54 crc kubenswrapper[4856]: I1202 00:22:54.421303 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-default-0"] Dec 02 00:22:54 crc kubenswrapper[4856]: W1202 00:22:54.432417 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc28fa42_75c6_4d34_8d03_6e759368f5e5.slice/crio-2ee9be6464a9dee7ac30c752e7e30ff31fc21dac8cb9399d2e5a91cff33b2162 WatchSource:0}: Error finding container 2ee9be6464a9dee7ac30c752e7e30ff31fc21dac8cb9399d2e5a91cff33b2162: Status 404 returned error can't find the container with id 2ee9be6464a9dee7ac30c752e7e30ff31fc21dac8cb9399d2e5a91cff33b2162 Dec 02 00:22:54 crc kubenswrapper[4856]: I1202 00:22:54.848961 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"dc28fa42-75c6-4d34-8d03-6e759368f5e5","Type":"ContainerStarted","Data":"2ee9be6464a9dee7ac30c752e7e30ff31fc21dac8cb9399d2e5a91cff33b2162"} Dec 02 00:22:58 crc kubenswrapper[4856]: I1202 00:22:58.881853 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"dc28fa42-75c6-4d34-8d03-6e759368f5e5","Type":"ContainerStarted","Data":"afe3df60027792c29f752c299fc48b60989a5621342055505c88a1ba6cf524fd"} Dec 02 00:23:02 crc kubenswrapper[4856]: I1202 00:23:02.796941 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-snmp-webhook-78bcbbdcff-42chm"] Dec 02 00:23:02 crc kubenswrapper[4856]: I1202 00:23:02.797928 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-42chm" Dec 02 00:23:02 crc kubenswrapper[4856]: I1202 00:23:02.840007 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-snmp-webhook-78bcbbdcff-42chm"] Dec 02 00:23:02 crc kubenswrapper[4856]: I1202 00:23:02.890564 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgjmf\" (UniqueName: \"kubernetes.io/projected/df74219e-9434-4ae0-a7d9-61bfdad38211-kube-api-access-vgjmf\") pod \"default-snmp-webhook-78bcbbdcff-42chm\" (UID: \"df74219e-9434-4ae0-a7d9-61bfdad38211\") " pod="service-telemetry/default-snmp-webhook-78bcbbdcff-42chm" Dec 02 00:23:02 crc kubenswrapper[4856]: I1202 00:23:02.991910 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgjmf\" (UniqueName: \"kubernetes.io/projected/df74219e-9434-4ae0-a7d9-61bfdad38211-kube-api-access-vgjmf\") pod \"default-snmp-webhook-78bcbbdcff-42chm\" (UID: \"df74219e-9434-4ae0-a7d9-61bfdad38211\") " pod="service-telemetry/default-snmp-webhook-78bcbbdcff-42chm" Dec 02 00:23:03 crc kubenswrapper[4856]: I1202 00:23:03.010660 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgjmf\" (UniqueName: \"kubernetes.io/projected/df74219e-9434-4ae0-a7d9-61bfdad38211-kube-api-access-vgjmf\") pod \"default-snmp-webhook-78bcbbdcff-42chm\" (UID: \"df74219e-9434-4ae0-a7d9-61bfdad38211\") " pod="service-telemetry/default-snmp-webhook-78bcbbdcff-42chm" Dec 02 00:23:03 crc kubenswrapper[4856]: I1202 00:23:03.158312 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-42chm" Dec 02 00:23:03 crc kubenswrapper[4856]: I1202 00:23:03.591002 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-snmp-webhook-78bcbbdcff-42chm"] Dec 02 00:23:03 crc kubenswrapper[4856]: W1202 00:23:03.597155 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf74219e_9434_4ae0_a7d9_61bfdad38211.slice/crio-caa9967a51cb63e3cfbfa59d12835cf32e7e29807e54c447021db83fafa28845 WatchSource:0}: Error finding container caa9967a51cb63e3cfbfa59d12835cf32e7e29807e54c447021db83fafa28845: Status 404 returned error can't find the container with id caa9967a51cb63e3cfbfa59d12835cf32e7e29807e54c447021db83fafa28845 Dec 02 00:23:03 crc kubenswrapper[4856]: I1202 00:23:03.915872 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-42chm" event={"ID":"df74219e-9434-4ae0-a7d9-61bfdad38211","Type":"ContainerStarted","Data":"caa9967a51cb63e3cfbfa59d12835cf32e7e29807e54c447021db83fafa28845"} Dec 02 00:23:05 crc kubenswrapper[4856]: I1202 00:23:05.062345 4856 patch_prober.go:28] interesting pod/machine-config-daemon-455ww container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 00:23:05 crc kubenswrapper[4856]: I1202 00:23:05.062399 4856 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" Dec 02 00:23:05 crc kubenswrapper[4856]: I1202 00:23:05.062440 4856 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-455ww" Dec 02 00:23:05 crc kubenswrapper[4856]: I1202 00:23:05.062870 4856 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e7e6284ee922e4a2a16a6b87da7e0c59263014f87eb5443234c0a212cc45aca5"} pod="openshift-machine-config-operator/machine-config-daemon-455ww" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 00:23:05 crc kubenswrapper[4856]: I1202 00:23:05.062920 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" containerID="cri-o://e7e6284ee922e4a2a16a6b87da7e0c59263014f87eb5443234c0a212cc45aca5" gracePeriod=600 Dec 02 00:23:05 crc kubenswrapper[4856]: I1202 00:23:05.935718 4856 generic.go:334] "Generic (PLEG): container finished" podID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerID="e7e6284ee922e4a2a16a6b87da7e0c59263014f87eb5443234c0a212cc45aca5" exitCode=0 Dec 02 00:23:05 crc kubenswrapper[4856]: I1202 00:23:05.936311 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" event={"ID":"0271f00d-b420-4dee-aa8b-92d6fc294b2a","Type":"ContainerDied","Data":"e7e6284ee922e4a2a16a6b87da7e0c59263014f87eb5443234c0a212cc45aca5"} Dec 02 00:23:05 crc kubenswrapper[4856]: I1202 00:23:05.936344 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" event={"ID":"0271f00d-b420-4dee-aa8b-92d6fc294b2a","Type":"ContainerStarted","Data":"29e2f8d4696162c413a025531de65a7fa0caff678050b80073dc7b7e99207924"} Dec 02 00:23:05 crc kubenswrapper[4856]: I1202 00:23:05.936364 4856 scope.go:117] "RemoveContainer" containerID="bc5c4932251b6c6119e7f7784c57be6ad9b00828d16d6b292535c8fefb264f0e" Dec 02 00:23:05 crc kubenswrapper[4856]: I1202 00:23:05.939368 4856 generic.go:334] "Generic (PLEG): container finished" podID="dc28fa42-75c6-4d34-8d03-6e759368f5e5" containerID="afe3df60027792c29f752c299fc48b60989a5621342055505c88a1ba6cf524fd" exitCode=0 Dec 02 00:23:05 crc kubenswrapper[4856]: I1202 00:23:05.939397 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"dc28fa42-75c6-4d34-8d03-6e759368f5e5","Type":"ContainerDied","Data":"afe3df60027792c29f752c299fc48b60989a5621342055505c88a1ba6cf524fd"} Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.579666 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/alertmanager-default-0"] Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.581010 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.586778 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-stf-dockercfg-rtx89" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.586786 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-cluster-tls-config" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.586996 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-generated" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.587471 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-tls-assets-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.587520 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-web-config" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.587542 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-alertmanager-proxy-tls" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.592113 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/alertmanager-default-0"] Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.738748 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/9c32b620-4831-417b-b2bb-5bc26dc65c4d-config-out\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.739099 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/9c32b620-4831-417b-b2bb-5bc26dc65c4d-tls-assets\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.739131 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/9c32b620-4831-417b-b2bb-5bc26dc65c4d-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.739161 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mz6tt\" (UniqueName: \"kubernetes.io/projected/9c32b620-4831-417b-b2bb-5bc26dc65c4d-kube-api-access-mz6tt\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.739185 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-1ea09c19-b9f3-468c-83ab-1958b96c344f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1ea09c19-b9f3-468c-83ab-1958b96c344f\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.739209 4856 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/9c32b620-4831-417b-b2bb-5bc26dc65c4d-cluster-tls-config\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.739236 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/9c32b620-4831-417b-b2bb-5bc26dc65c4d-config-volume\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.739258 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/9c32b620-4831-417b-b2bb-5bc26dc65c4d-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.739284 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/9c32b620-4831-417b-b2bb-5bc26dc65c4d-web-config\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.840952 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/9c32b620-4831-417b-b2bb-5bc26dc65c4d-cluster-tls-config\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.841006 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/9c32b620-4831-417b-b2bb-5bc26dc65c4d-config-volume\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.841033 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/9c32b620-4831-417b-b2bb-5bc26dc65c4d-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.841058 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/9c32b620-4831-417b-b2bb-5bc26dc65c4d-web-config\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.841082 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/9c32b620-4831-417b-b2bb-5bc26dc65c4d-config-out\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 
00:23:06.841123 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/9c32b620-4831-417b-b2bb-5bc26dc65c4d-tls-assets\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.841148 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/9c32b620-4831-417b-b2bb-5bc26dc65c4d-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.841173 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mz6tt\" (UniqueName: \"kubernetes.io/projected/9c32b620-4831-417b-b2bb-5bc26dc65c4d-kube-api-access-mz6tt\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.841197 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-1ea09c19-b9f3-468c-83ab-1958b96c344f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1ea09c19-b9f3-468c-83ab-1958b96c344f\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: E1202 00:23:06.841225 4856 secret.go:188] Couldn't get secret service-telemetry/default-alertmanager-proxy-tls: secret "default-alertmanager-proxy-tls" not found Dec 02 00:23:06 crc kubenswrapper[4856]: E1202 00:23:06.841302 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9c32b620-4831-417b-b2bb-5bc26dc65c4d-secret-default-alertmanager-proxy-tls podName:9c32b620-4831-417b-b2bb-5bc26dc65c4d nodeName:}" failed. No retries permitted until 2025-12-02 00:23:07.341282442 +0000 UTC m=+1014.367650436 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "secret-default-alertmanager-proxy-tls" (UniqueName: "kubernetes.io/secret/9c32b620-4831-417b-b2bb-5bc26dc65c4d-secret-default-alertmanager-proxy-tls") pod "alertmanager-default-0" (UID: "9c32b620-4831-417b-b2bb-5bc26dc65c4d") : secret "default-alertmanager-proxy-tls" not found Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.847862 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/9c32b620-4831-417b-b2bb-5bc26dc65c4d-cluster-tls-config\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.847874 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/9c32b620-4831-417b-b2bb-5bc26dc65c4d-web-config\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.847955 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/9c32b620-4831-417b-b2bb-5bc26dc65c4d-config-out\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.849013 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/9c32b620-4831-417b-b2bb-5bc26dc65c4d-config-volume\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.849611 4856 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.849650 4856 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-1ea09c19-b9f3-468c-83ab-1958b96c344f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1ea09c19-b9f3-468c-83ab-1958b96c344f\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/6e85af1222cb60e3ecff6fe424d060f24382c576b44ab7b689c94f51e57ceed1/globalmount\"" pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.850262 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/9c32b620-4831-417b-b2bb-5bc26dc65c4d-tls-assets\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.851093 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/9c32b620-4831-417b-b2bb-5bc26dc65c4d-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.856730 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mz6tt\" (UniqueName: \"kubernetes.io/projected/9c32b620-4831-417b-b2bb-5bc26dc65c4d-kube-api-access-mz6tt\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:06 crc kubenswrapper[4856]: I1202 00:23:06.879303 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-1ea09c19-b9f3-468c-83ab-1958b96c344f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1ea09c19-b9f3-468c-83ab-1958b96c344f\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:07 crc kubenswrapper[4856]: I1202 00:23:07.348107 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/9c32b620-4831-417b-b2bb-5bc26dc65c4d-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:07 crc kubenswrapper[4856]: E1202 00:23:07.348260 4856 secret.go:188] Couldn't get secret service-telemetry/default-alertmanager-proxy-tls: secret "default-alertmanager-proxy-tls" not found Dec 02 00:23:07 crc kubenswrapper[4856]: E1202 00:23:07.348339 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9c32b620-4831-417b-b2bb-5bc26dc65c4d-secret-default-alertmanager-proxy-tls podName:9c32b620-4831-417b-b2bb-5bc26dc65c4d nodeName:}" failed. No retries permitted until 2025-12-02 00:23:08.348326117 +0000 UTC m=+1015.374694121 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "secret-default-alertmanager-proxy-tls" (UniqueName: "kubernetes.io/secret/9c32b620-4831-417b-b2bb-5bc26dc65c4d-secret-default-alertmanager-proxy-tls") pod "alertmanager-default-0" (UID: "9c32b620-4831-417b-b2bb-5bc26dc65c4d") : secret "default-alertmanager-proxy-tls" not found Dec 02 00:23:08 crc kubenswrapper[4856]: I1202 00:23:08.361603 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/9c32b620-4831-417b-b2bb-5bc26dc65c4d-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:08 crc kubenswrapper[4856]: E1202 00:23:08.362014 4856 secret.go:188] Couldn't get secret service-telemetry/default-alertmanager-proxy-tls: secret "default-alertmanager-proxy-tls" not found Dec 02 00:23:08 crc kubenswrapper[4856]: E1202 00:23:08.362065 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9c32b620-4831-417b-b2bb-5bc26dc65c4d-secret-default-alertmanager-proxy-tls podName:9c32b620-4831-417b-b2bb-5bc26dc65c4d nodeName:}" failed. No retries permitted until 2025-12-02 00:23:10.362050947 +0000 UTC m=+1017.388418941 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "secret-default-alertmanager-proxy-tls" (UniqueName: "kubernetes.io/secret/9c32b620-4831-417b-b2bb-5bc26dc65c4d-secret-default-alertmanager-proxy-tls") pod "alertmanager-default-0" (UID: "9c32b620-4831-417b-b2bb-5bc26dc65c4d") : secret "default-alertmanager-proxy-tls" not found Dec 02 00:23:10 crc kubenswrapper[4856]: I1202 00:23:10.392613 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/9c32b620-4831-417b-b2bb-5bc26dc65c4d-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:10 crc kubenswrapper[4856]: I1202 00:23:10.398751 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/9c32b620-4831-417b-b2bb-5bc26dc65c4d-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"9c32b620-4831-417b-b2bb-5bc26dc65c4d\") " pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:10 crc kubenswrapper[4856]: I1202 00:23:10.540648 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/alertmanager-default-0" Dec 02 00:23:11 crc kubenswrapper[4856]: I1202 00:23:11.226059 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/alertmanager-default-0"] Dec 02 00:23:11 crc kubenswrapper[4856]: I1202 00:23:11.998687 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"9c32b620-4831-417b-b2bb-5bc26dc65c4d","Type":"ContainerStarted","Data":"d29fc46bbf040b79cf5caf7137a209830cb27f5f66da57b3fba7f460bf74b214"} Dec 02 00:23:12 crc kubenswrapper[4856]: I1202 00:23:12.002962 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-42chm" event={"ID":"df74219e-9434-4ae0-a7d9-61bfdad38211","Type":"ContainerStarted","Data":"1c0aa1ff65b3228f3d57c927b76e388028d27d1da0c2071b0e323bb8d703899b"} Dec 02 00:23:12 crc kubenswrapper[4856]: I1202 00:23:12.024032 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-42chm" podStartSLOduration=2.320282057 podStartE2EDuration="10.024002049s" podCreationTimestamp="2025-12-02 00:23:02 +0000 UTC" firstStartedPulling="2025-12-02 00:23:03.600084983 +0000 UTC m=+1010.626452987" lastFinishedPulling="2025-12-02 00:23:11.303804975 +0000 UTC m=+1018.330172979" observedRunningTime="2025-12-02 00:23:12.016351541 +0000 UTC m=+1019.042719545" watchObservedRunningTime="2025-12-02 00:23:12.024002049 +0000 UTC m=+1019.050370053" Dec 02 00:23:13 crc kubenswrapper[4856]: I1202 00:23:13.013803 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"9c32b620-4831-417b-b2bb-5bc26dc65c4d","Type":"ContainerStarted","Data":"20f9615864cd7854dd7e5877c1b6c3d2c600c0bf7598d920bfd051eb921715f4"} Dec 02 00:23:17 crc kubenswrapper[4856]: I1202 00:23:17.081778 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"dc28fa42-75c6-4d34-8d03-6e759368f5e5","Type":"ContainerStarted","Data":"03140888cd8341f4fe0988385a68e5dadf397123c9ea7e15df0be9fb5ace2f30"} Dec 02 00:23:19 crc kubenswrapper[4856]: I1202 00:23:19.329543 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw"] Dec 02 00:23:19 crc kubenswrapper[4856]: I1202 00:23:19.335351 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" Dec 02 00:23:19 crc kubenswrapper[4856]: I1202 00:23:19.338270 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-session-secret" Dec 02 00:23:19 crc kubenswrapper[4856]: I1202 00:23:19.338311 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-coll-meter-proxy-tls" Dec 02 00:23:19 crc kubenswrapper[4856]: I1202 00:23:19.338400 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-coll-meter-sg-core-configmap" Dec 02 00:23:19 crc kubenswrapper[4856]: I1202 00:23:19.338271 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-dockercfg-x2bnp" Dec 02 00:23:19 crc kubenswrapper[4856]: I1202 00:23:19.348911 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw"] Dec 02 00:23:19 crc kubenswrapper[4856]: I1202 00:23:19.432128 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/f8c47b36-32a8-4485-b337-3b92535e0875-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw\" (UID: \"f8c47b36-32a8-4485-b337-3b92535e0875\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" Dec 02 00:23:19 crc kubenswrapper[4856]: I1202 00:23:19.432200 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62swn\" (UniqueName: \"kubernetes.io/projected/f8c47b36-32a8-4485-b337-3b92535e0875-kube-api-access-62swn\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw\" (UID: \"f8c47b36-32a8-4485-b337-3b92535e0875\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" Dec 02 00:23:19 crc kubenswrapper[4856]: I1202 00:23:19.432263 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/f8c47b36-32a8-4485-b337-3b92535e0875-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw\" (UID: \"f8c47b36-32a8-4485-b337-3b92535e0875\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" Dec 02 00:23:19 crc kubenswrapper[4856]: I1202 00:23:19.432322 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/f8c47b36-32a8-4485-b337-3b92535e0875-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw\" (UID: \"f8c47b36-32a8-4485-b337-3b92535e0875\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" Dec 02 00:23:19 crc kubenswrapper[4856]: I1202 00:23:19.432347 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/f8c47b36-32a8-4485-b337-3b92535e0875-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw\" (UID: \"f8c47b36-32a8-4485-b337-3b92535e0875\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" Dec 02 00:23:19 crc kubenswrapper[4856]: I1202 00:23:19.534356 4856 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/f8c47b36-32a8-4485-b337-3b92535e0875-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw\" (UID: \"f8c47b36-32a8-4485-b337-3b92535e0875\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" Dec 02 00:23:19 crc kubenswrapper[4856]: I1202 00:23:19.534421 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62swn\" (UniqueName: \"kubernetes.io/projected/f8c47b36-32a8-4485-b337-3b92535e0875-kube-api-access-62swn\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw\" (UID: \"f8c47b36-32a8-4485-b337-3b92535e0875\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" Dec 02 00:23:19 crc kubenswrapper[4856]: I1202 00:23:19.534456 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/f8c47b36-32a8-4485-b337-3b92535e0875-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw\" (UID: \"f8c47b36-32a8-4485-b337-3b92535e0875\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" Dec 02 00:23:19 crc kubenswrapper[4856]: I1202 00:23:19.534494 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/f8c47b36-32a8-4485-b337-3b92535e0875-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw\" (UID: \"f8c47b36-32a8-4485-b337-3b92535e0875\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" Dec 02 00:23:19 crc kubenswrapper[4856]: I1202 00:23:19.534523 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/f8c47b36-32a8-4485-b337-3b92535e0875-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw\" (UID: \"f8c47b36-32a8-4485-b337-3b92535e0875\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" Dec 02 00:23:19 crc kubenswrapper[4856]: E1202 00:23:19.534682 4856 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-coll-meter-proxy-tls: secret "default-cloud1-coll-meter-proxy-tls" not found Dec 02 00:23:19 crc kubenswrapper[4856]: E1202 00:23:19.534752 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f8c47b36-32a8-4485-b337-3b92535e0875-default-cloud1-coll-meter-proxy-tls podName:f8c47b36-32a8-4485-b337-3b92535e0875 nodeName:}" failed. No retries permitted until 2025-12-02 00:23:20.034734727 +0000 UTC m=+1027.061102731 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "default-cloud1-coll-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/f8c47b36-32a8-4485-b337-3b92535e0875-default-cloud1-coll-meter-proxy-tls") pod "default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" (UID: "f8c47b36-32a8-4485-b337-3b92535e0875") : secret "default-cloud1-coll-meter-proxy-tls" not found Dec 02 00:23:19 crc kubenswrapper[4856]: I1202 00:23:19.535164 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/f8c47b36-32a8-4485-b337-3b92535e0875-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw\" (UID: \"f8c47b36-32a8-4485-b337-3b92535e0875\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" Dec 02 00:23:19 crc kubenswrapper[4856]: I1202 00:23:19.535677 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/f8c47b36-32a8-4485-b337-3b92535e0875-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw\" (UID: \"f8c47b36-32a8-4485-b337-3b92535e0875\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" Dec 02 00:23:19 crc kubenswrapper[4856]: I1202 00:23:19.542330 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/f8c47b36-32a8-4485-b337-3b92535e0875-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw\" (UID: \"f8c47b36-32a8-4485-b337-3b92535e0875\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" Dec 02 00:23:19 crc kubenswrapper[4856]: I1202 00:23:19.556452 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62swn\" (UniqueName: \"kubernetes.io/projected/f8c47b36-32a8-4485-b337-3b92535e0875-kube-api-access-62swn\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw\" (UID: \"f8c47b36-32a8-4485-b337-3b92535e0875\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" Dec 02 00:23:20 crc kubenswrapper[4856]: I1202 00:23:20.040432 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/f8c47b36-32a8-4485-b337-3b92535e0875-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw\" (UID: \"f8c47b36-32a8-4485-b337-3b92535e0875\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" Dec 02 00:23:20 crc kubenswrapper[4856]: E1202 00:23:20.040604 4856 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-coll-meter-proxy-tls: secret "default-cloud1-coll-meter-proxy-tls" not found Dec 02 00:23:20 crc kubenswrapper[4856]: E1202 00:23:20.040681 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f8c47b36-32a8-4485-b337-3b92535e0875-default-cloud1-coll-meter-proxy-tls podName:f8c47b36-32a8-4485-b337-3b92535e0875 nodeName:}" failed. No retries permitted until 2025-12-02 00:23:21.040653924 +0000 UTC m=+1028.067021928 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "default-cloud1-coll-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/f8c47b36-32a8-4485-b337-3b92535e0875-default-cloud1-coll-meter-proxy-tls") pod "default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" (UID: "f8c47b36-32a8-4485-b337-3b92535e0875") : secret "default-cloud1-coll-meter-proxy-tls" not found Dec 02 00:23:21 crc kubenswrapper[4856]: I1202 00:23:21.054350 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/f8c47b36-32a8-4485-b337-3b92535e0875-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw\" (UID: \"f8c47b36-32a8-4485-b337-3b92535e0875\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" Dec 02 00:23:21 crc kubenswrapper[4856]: I1202 00:23:21.059532 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/f8c47b36-32a8-4485-b337-3b92535e0875-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw\" (UID: \"f8c47b36-32a8-4485-b337-3b92535e0875\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" Dec 02 00:23:21 crc kubenswrapper[4856]: I1202 00:23:21.173564 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" Dec 02 00:23:21 crc kubenswrapper[4856]: I1202 00:23:21.645112 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw"] Dec 02 00:23:22 crc kubenswrapper[4856]: I1202 00:23:22.121050 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"dc28fa42-75c6-4d34-8d03-6e759368f5e5","Type":"ContainerStarted","Data":"150e767cbf38973da6a689b1b0d26a451b6d7cc9bc3533324df017dfbf791bdf"} Dec 02 00:23:22 crc kubenswrapper[4856]: I1202 00:23:22.122519 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" event={"ID":"f8c47b36-32a8-4485-b337-3b92535e0875","Type":"ContainerStarted","Data":"401951e1d7257ab67d07f726279ec4c4c611fa3194080cca72b2216cc470d957"} Dec 02 00:23:23 crc kubenswrapper[4856]: I1202 00:23:23.144733 4856 generic.go:334] "Generic (PLEG): container finished" podID="9c32b620-4831-417b-b2bb-5bc26dc65c4d" containerID="20f9615864cd7854dd7e5877c1b6c3d2c600c0bf7598d920bfd051eb921715f4" exitCode=0 Dec 02 00:23:23 crc kubenswrapper[4856]: I1202 00:23:23.144780 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"9c32b620-4831-417b-b2bb-5bc26dc65c4d","Type":"ContainerDied","Data":"20f9615864cd7854dd7e5877c1b6c3d2c600c0bf7598d920bfd051eb921715f4"} Dec 02 00:23:23 crc kubenswrapper[4856]: I1202 00:23:23.403649 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg"] Dec 02 00:23:23 crc kubenswrapper[4856]: I1202 00:23:23.405673 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" Dec 02 00:23:23 crc kubenswrapper[4856]: I1202 00:23:23.410852 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-ceil-meter-sg-core-configmap" Dec 02 00:23:23 crc kubenswrapper[4856]: I1202 00:23:23.411425 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-ceil-meter-proxy-tls" Dec 02 00:23:23 crc kubenswrapper[4856]: I1202 00:23:23.427306 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg"] Dec 02 00:23:23 crc kubenswrapper[4856]: I1202 00:23:23.596645 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/16a2092a-9a92-431a-9cb4-2c760fb5b5f6-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg\" (UID: \"16a2092a-9a92-431a-9cb4-2c760fb5b5f6\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" Dec 02 00:23:23 crc kubenswrapper[4856]: I1202 00:23:23.597011 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/16a2092a-9a92-431a-9cb4-2c760fb5b5f6-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg\" (UID: \"16a2092a-9a92-431a-9cb4-2c760fb5b5f6\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" Dec 02 00:23:23 crc kubenswrapper[4856]: I1202 00:23:23.597038 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/16a2092a-9a92-431a-9cb4-2c760fb5b5f6-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg\" (UID: \"16a2092a-9a92-431a-9cb4-2c760fb5b5f6\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" Dec 02 00:23:23 crc kubenswrapper[4856]: I1202 00:23:23.597097 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/16a2092a-9a92-431a-9cb4-2c760fb5b5f6-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg\" (UID: \"16a2092a-9a92-431a-9cb4-2c760fb5b5f6\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" Dec 02 00:23:23 crc kubenswrapper[4856]: I1202 00:23:23.597151 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8f4km\" (UniqueName: \"kubernetes.io/projected/16a2092a-9a92-431a-9cb4-2c760fb5b5f6-kube-api-access-8f4km\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg\" (UID: \"16a2092a-9a92-431a-9cb4-2c760fb5b5f6\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" Dec 02 00:23:23 crc kubenswrapper[4856]: I1202 00:23:23.699099 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/16a2092a-9a92-431a-9cb4-2c760fb5b5f6-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg\" (UID: \"16a2092a-9a92-431a-9cb4-2c760fb5b5f6\") " 
pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" Dec 02 00:23:23 crc kubenswrapper[4856]: I1202 00:23:23.699200 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/16a2092a-9a92-431a-9cb4-2c760fb5b5f6-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg\" (UID: \"16a2092a-9a92-431a-9cb4-2c760fb5b5f6\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" Dec 02 00:23:23 crc kubenswrapper[4856]: I1202 00:23:23.699231 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/16a2092a-9a92-431a-9cb4-2c760fb5b5f6-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg\" (UID: \"16a2092a-9a92-431a-9cb4-2c760fb5b5f6\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" Dec 02 00:23:23 crc kubenswrapper[4856]: I1202 00:23:23.699282 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/16a2092a-9a92-431a-9cb4-2c760fb5b5f6-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg\" (UID: \"16a2092a-9a92-431a-9cb4-2c760fb5b5f6\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" Dec 02 00:23:23 crc kubenswrapper[4856]: I1202 00:23:23.699307 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8f4km\" (UniqueName: \"kubernetes.io/projected/16a2092a-9a92-431a-9cb4-2c760fb5b5f6-kube-api-access-8f4km\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg\" (UID: \"16a2092a-9a92-431a-9cb4-2c760fb5b5f6\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" Dec 02 00:23:23 crc kubenswrapper[4856]: E1202 00:23:23.699305 4856 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-ceil-meter-proxy-tls: secret "default-cloud1-ceil-meter-proxy-tls" not found Dec 02 00:23:23 crc kubenswrapper[4856]: E1202 00:23:23.699399 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/16a2092a-9a92-431a-9cb4-2c760fb5b5f6-default-cloud1-ceil-meter-proxy-tls podName:16a2092a-9a92-431a-9cb4-2c760fb5b5f6 nodeName:}" failed. No retries permitted until 2025-12-02 00:23:24.199374447 +0000 UTC m=+1031.225742441 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "default-cloud1-ceil-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/16a2092a-9a92-431a-9cb4-2c760fb5b5f6-default-cloud1-ceil-meter-proxy-tls") pod "default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" (UID: "16a2092a-9a92-431a-9cb4-2c760fb5b5f6") : secret "default-cloud1-ceil-meter-proxy-tls" not found Dec 02 00:23:23 crc kubenswrapper[4856]: I1202 00:23:23.700774 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/16a2092a-9a92-431a-9cb4-2c760fb5b5f6-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg\" (UID: \"16a2092a-9a92-431a-9cb4-2c760fb5b5f6\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" Dec 02 00:23:23 crc kubenswrapper[4856]: I1202 00:23:23.700811 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/16a2092a-9a92-431a-9cb4-2c760fb5b5f6-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg\" (UID: \"16a2092a-9a92-431a-9cb4-2c760fb5b5f6\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" Dec 02 00:23:23 crc kubenswrapper[4856]: I1202 00:23:23.715274 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/16a2092a-9a92-431a-9cb4-2c760fb5b5f6-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg\" (UID: \"16a2092a-9a92-431a-9cb4-2c760fb5b5f6\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" Dec 02 00:23:23 crc kubenswrapper[4856]: I1202 00:23:23.716640 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8f4km\" (UniqueName: \"kubernetes.io/projected/16a2092a-9a92-431a-9cb4-2c760fb5b5f6-kube-api-access-8f4km\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg\" (UID: \"16a2092a-9a92-431a-9cb4-2c760fb5b5f6\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" Dec 02 00:23:24 crc kubenswrapper[4856]: I1202 00:23:24.206002 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/16a2092a-9a92-431a-9cb4-2c760fb5b5f6-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg\" (UID: \"16a2092a-9a92-431a-9cb4-2c760fb5b5f6\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" Dec 02 00:23:24 crc kubenswrapper[4856]: E1202 00:23:24.206211 4856 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-ceil-meter-proxy-tls: secret "default-cloud1-ceil-meter-proxy-tls" not found Dec 02 00:23:24 crc kubenswrapper[4856]: E1202 00:23:24.206273 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/16a2092a-9a92-431a-9cb4-2c760fb5b5f6-default-cloud1-ceil-meter-proxy-tls podName:16a2092a-9a92-431a-9cb4-2c760fb5b5f6 nodeName:}" failed. No retries permitted until 2025-12-02 00:23:25.206243158 +0000 UTC m=+1032.232611152 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "default-cloud1-ceil-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/16a2092a-9a92-431a-9cb4-2c760fb5b5f6-default-cloud1-ceil-meter-proxy-tls") pod "default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" (UID: "16a2092a-9a92-431a-9cb4-2c760fb5b5f6") : secret "default-cloud1-ceil-meter-proxy-tls" not found Dec 02 00:23:25 crc kubenswrapper[4856]: I1202 00:23:25.221546 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/16a2092a-9a92-431a-9cb4-2c760fb5b5f6-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg\" (UID: \"16a2092a-9a92-431a-9cb4-2c760fb5b5f6\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" Dec 02 00:23:25 crc kubenswrapper[4856]: I1202 00:23:25.232892 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/16a2092a-9a92-431a-9cb4-2c760fb5b5f6-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg\" (UID: \"16a2092a-9a92-431a-9cb4-2c760fb5b5f6\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" Dec 02 00:23:25 crc kubenswrapper[4856]: I1202 00:23:25.269478 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" Dec 02 00:23:26 crc kubenswrapper[4856]: I1202 00:23:26.898952 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p"] Dec 02 00:23:26 crc kubenswrapper[4856]: I1202 00:23:26.900731 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" Dec 02 00:23:26 crc kubenswrapper[4856]: I1202 00:23:26.903291 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-sens-meter-proxy-tls" Dec 02 00:23:26 crc kubenswrapper[4856]: I1202 00:23:26.903423 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-sens-meter-sg-core-configmap" Dec 02 00:23:26 crc kubenswrapper[4856]: I1202 00:23:26.907126 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p"] Dec 02 00:23:27 crc kubenswrapper[4856]: I1202 00:23:27.048359 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kz975\" (UniqueName: \"kubernetes.io/projected/7896ff15-1225-4a83-898d-3c9166555c2c-kube-api-access-kz975\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p\" (UID: \"7896ff15-1225-4a83-898d-3c9166555c2c\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" Dec 02 00:23:27 crc kubenswrapper[4856]: I1202 00:23:27.048479 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/7896ff15-1225-4a83-898d-3c9166555c2c-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p\" (UID: \"7896ff15-1225-4a83-898d-3c9166555c2c\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" Dec 02 00:23:27 crc kubenswrapper[4856]: I1202 00:23:27.048552 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/7896ff15-1225-4a83-898d-3c9166555c2c-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p\" (UID: \"7896ff15-1225-4a83-898d-3c9166555c2c\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" Dec 02 00:23:27 crc kubenswrapper[4856]: I1202 00:23:27.048655 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/7896ff15-1225-4a83-898d-3c9166555c2c-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p\" (UID: \"7896ff15-1225-4a83-898d-3c9166555c2c\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" Dec 02 00:23:27 crc kubenswrapper[4856]: I1202 00:23:27.048700 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/7896ff15-1225-4a83-898d-3c9166555c2c-socket-dir\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p\" (UID: \"7896ff15-1225-4a83-898d-3c9166555c2c\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" Dec 02 00:23:27 crc kubenswrapper[4856]: I1202 00:23:27.150714 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kz975\" (UniqueName: \"kubernetes.io/projected/7896ff15-1225-4a83-898d-3c9166555c2c-kube-api-access-kz975\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p\" (UID: \"7896ff15-1225-4a83-898d-3c9166555c2c\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" Dec 02 
00:23:27 crc kubenswrapper[4856]: I1202 00:23:27.150818 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/7896ff15-1225-4a83-898d-3c9166555c2c-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p\" (UID: \"7896ff15-1225-4a83-898d-3c9166555c2c\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" Dec 02 00:23:27 crc kubenswrapper[4856]: I1202 00:23:27.150878 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/7896ff15-1225-4a83-898d-3c9166555c2c-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p\" (UID: \"7896ff15-1225-4a83-898d-3c9166555c2c\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" Dec 02 00:23:27 crc kubenswrapper[4856]: I1202 00:23:27.150934 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/7896ff15-1225-4a83-898d-3c9166555c2c-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p\" (UID: \"7896ff15-1225-4a83-898d-3c9166555c2c\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" Dec 02 00:23:27 crc kubenswrapper[4856]: I1202 00:23:27.150994 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/7896ff15-1225-4a83-898d-3c9166555c2c-socket-dir\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p\" (UID: \"7896ff15-1225-4a83-898d-3c9166555c2c\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" Dec 02 00:23:27 crc kubenswrapper[4856]: E1202 00:23:27.151253 4856 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-sens-meter-proxy-tls: secret "default-cloud1-sens-meter-proxy-tls" not found Dec 02 00:23:27 crc kubenswrapper[4856]: I1202 00:23:27.151700 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/7896ff15-1225-4a83-898d-3c9166555c2c-socket-dir\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p\" (UID: \"7896ff15-1225-4a83-898d-3c9166555c2c\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" Dec 02 00:23:27 crc kubenswrapper[4856]: I1202 00:23:27.151786 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/7896ff15-1225-4a83-898d-3c9166555c2c-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p\" (UID: \"7896ff15-1225-4a83-898d-3c9166555c2c\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" Dec 02 00:23:27 crc kubenswrapper[4856]: E1202 00:23:27.152056 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7896ff15-1225-4a83-898d-3c9166555c2c-default-cloud1-sens-meter-proxy-tls podName:7896ff15-1225-4a83-898d-3c9166555c2c nodeName:}" failed. No retries permitted until 2025-12-02 00:23:27.651326197 +0000 UTC m=+1034.677694191 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "default-cloud1-sens-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/7896ff15-1225-4a83-898d-3c9166555c2c-default-cloud1-sens-meter-proxy-tls") pod "default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" (UID: "7896ff15-1225-4a83-898d-3c9166555c2c") : secret "default-cloud1-sens-meter-proxy-tls" not found Dec 02 00:23:27 crc kubenswrapper[4856]: I1202 00:23:27.161558 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/7896ff15-1225-4a83-898d-3c9166555c2c-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p\" (UID: \"7896ff15-1225-4a83-898d-3c9166555c2c\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" Dec 02 00:23:27 crc kubenswrapper[4856]: I1202 00:23:27.168583 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kz975\" (UniqueName: \"kubernetes.io/projected/7896ff15-1225-4a83-898d-3c9166555c2c-kube-api-access-kz975\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p\" (UID: \"7896ff15-1225-4a83-898d-3c9166555c2c\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" Dec 02 00:23:27 crc kubenswrapper[4856]: I1202 00:23:27.655625 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/7896ff15-1225-4a83-898d-3c9166555c2c-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p\" (UID: \"7896ff15-1225-4a83-898d-3c9166555c2c\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" Dec 02 00:23:27 crc kubenswrapper[4856]: E1202 00:23:27.655802 4856 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-sens-meter-proxy-tls: secret "default-cloud1-sens-meter-proxy-tls" not found Dec 02 00:23:27 crc kubenswrapper[4856]: E1202 00:23:27.655887 4856 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7896ff15-1225-4a83-898d-3c9166555c2c-default-cloud1-sens-meter-proxy-tls podName:7896ff15-1225-4a83-898d-3c9166555c2c nodeName:}" failed. No retries permitted until 2025-12-02 00:23:28.655869351 +0000 UTC m=+1035.682237355 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "default-cloud1-sens-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/7896ff15-1225-4a83-898d-3c9166555c2c-default-cloud1-sens-meter-proxy-tls") pod "default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" (UID: "7896ff15-1225-4a83-898d-3c9166555c2c") : secret "default-cloud1-sens-meter-proxy-tls" not found Dec 02 00:23:28 crc kubenswrapper[4856]: I1202 00:23:28.672182 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/7896ff15-1225-4a83-898d-3c9166555c2c-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p\" (UID: \"7896ff15-1225-4a83-898d-3c9166555c2c\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" Dec 02 00:23:28 crc kubenswrapper[4856]: I1202 00:23:28.677816 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/7896ff15-1225-4a83-898d-3c9166555c2c-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p\" (UID: \"7896ff15-1225-4a83-898d-3c9166555c2c\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" Dec 02 00:23:28 crc kubenswrapper[4856]: I1202 00:23:28.718790 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" Dec 02 00:23:30 crc kubenswrapper[4856]: I1202 00:23:30.428984 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg"] Dec 02 00:23:30 crc kubenswrapper[4856]: I1202 00:23:30.596572 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p"] Dec 02 00:23:31 crc kubenswrapper[4856]: I1202 00:23:31.219479 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"9c32b620-4831-417b-b2bb-5bc26dc65c4d","Type":"ContainerStarted","Data":"e2c4ff1489058c5d0f036f5239fa24af12399acc1921f3f32abc7582a2184844"} Dec 02 00:23:31 crc kubenswrapper[4856]: I1202 00:23:31.223934 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"dc28fa42-75c6-4d34-8d03-6e759368f5e5","Type":"ContainerStarted","Data":"0ce043fa4b711d2b051204cf024fdd70508471fd61c0d72635351203ea562278"} Dec 02 00:23:31 crc kubenswrapper[4856]: I1202 00:23:31.226649 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" event={"ID":"16a2092a-9a92-431a-9cb4-2c760fb5b5f6","Type":"ContainerStarted","Data":"e9e51c4b9de92044274fa68ff1fee0b2f35c171eb7d116467a0ec55c231070c7"} Dec 02 00:23:31 crc kubenswrapper[4856]: I1202 00:23:31.226695 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" event={"ID":"16a2092a-9a92-431a-9cb4-2c760fb5b5f6","Type":"ContainerStarted","Data":"01a9d8235f2fdc6a1033974133ba0f049db4d6c7ddc6252951ea4034d6ad8712"} Dec 02 00:23:31 crc kubenswrapper[4856]: I1202 00:23:31.236915 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" 
event={"ID":"f8c47b36-32a8-4485-b337-3b92535e0875","Type":"ContainerStarted","Data":"d96091b41f8ec6a5cbd861c325ecec128a221bdb06a321a6d468d9ea1140f600"} Dec 02 00:23:31 crc kubenswrapper[4856]: I1202 00:23:31.238613 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" event={"ID":"7896ff15-1225-4a83-898d-3c9166555c2c","Type":"ContainerStarted","Data":"f310ca94de4e6407c44f955554f21bdfed4a576b72ba60bea3da179b15fdb893"} Dec 02 00:23:31 crc kubenswrapper[4856]: I1202 00:23:31.265548 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/prometheus-default-0" podStartSLOduration=4.600718641 podStartE2EDuration="40.265524832s" podCreationTimestamp="2025-12-02 00:22:51 +0000 UTC" firstStartedPulling="2025-12-02 00:22:54.436943996 +0000 UTC m=+1001.463312000" lastFinishedPulling="2025-12-02 00:23:30.101750187 +0000 UTC m=+1037.128118191" observedRunningTime="2025-12-02 00:23:31.243508213 +0000 UTC m=+1038.269876217" watchObservedRunningTime="2025-12-02 00:23:31.265524832 +0000 UTC m=+1038.291892836" Dec 02 00:23:32 crc kubenswrapper[4856]: I1202 00:23:32.252182 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" event={"ID":"7896ff15-1225-4a83-898d-3c9166555c2c","Type":"ContainerStarted","Data":"22f6f59347ffb1d307902d7deee05f3b8781aba5b76e2b4c85d978758e6011df"} Dec 02 00:23:32 crc kubenswrapper[4856]: I1202 00:23:32.259708 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"9c32b620-4831-417b-b2bb-5bc26dc65c4d","Type":"ContainerStarted","Data":"a9c565f58758b3c6fecdaa6cd972ecf66ffbfdde401a11ff97dbebd2a34b3341"} Dec 02 00:23:33 crc kubenswrapper[4856]: I1202 00:23:33.279793 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"9c32b620-4831-417b-b2bb-5bc26dc65c4d","Type":"ContainerStarted","Data":"2853995731b5a160c1f7ff8754ae2fa89762259bb32700619534edb422b02088"} Dec 02 00:23:33 crc kubenswrapper[4856]: I1202 00:23:33.319354 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/alertmanager-default-0" podStartSLOduration=18.755149291 podStartE2EDuration="28.319338337s" podCreationTimestamp="2025-12-02 00:23:05 +0000 UTC" firstStartedPulling="2025-12-02 00:23:23.146283674 +0000 UTC m=+1030.172651678" lastFinishedPulling="2025-12-02 00:23:32.71047272 +0000 UTC m=+1039.736840724" observedRunningTime="2025-12-02 00:23:33.31297735 +0000 UTC m=+1040.339345354" watchObservedRunningTime="2025-12-02 00:23:33.319338337 +0000 UTC m=+1040.345706331" Dec 02 00:23:34 crc kubenswrapper[4856]: I1202 00:23:34.133867 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/prometheus-default-0" Dec 02 00:23:35 crc kubenswrapper[4856]: I1202 00:23:35.537871 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt"] Dec 02 00:23:35 crc kubenswrapper[4856]: I1202 00:23:35.541487 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt" Dec 02 00:23:35 crc kubenswrapper[4856]: I1202 00:23:35.544441 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-cert" Dec 02 00:23:35 crc kubenswrapper[4856]: I1202 00:23:35.544764 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-coll-event-sg-core-configmap" Dec 02 00:23:35 crc kubenswrapper[4856]: I1202 00:23:35.569835 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt"] Dec 02 00:23:35 crc kubenswrapper[4856]: I1202 00:23:35.693544 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt\" (UID: \"44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt" Dec 02 00:23:35 crc kubenswrapper[4856]: I1202 00:23:35.693885 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01-sg-core-config\") pod \"default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt\" (UID: \"44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt" Dec 02 00:23:35 crc kubenswrapper[4856]: I1202 00:23:35.693914 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49pqs\" (UniqueName: \"kubernetes.io/projected/44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01-kube-api-access-49pqs\") pod \"default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt\" (UID: \"44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt" Dec 02 00:23:35 crc kubenswrapper[4856]: I1202 00:23:35.693935 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01-socket-dir\") pod \"default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt\" (UID: \"44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt" Dec 02 00:23:35 crc kubenswrapper[4856]: I1202 00:23:35.795515 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01-sg-core-config\") pod \"default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt\" (UID: \"44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt" Dec 02 00:23:35 crc kubenswrapper[4856]: I1202 00:23:35.795565 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49pqs\" (UniqueName: \"kubernetes.io/projected/44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01-kube-api-access-49pqs\") pod \"default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt\" (UID: \"44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt" Dec 02 00:23:35 crc kubenswrapper[4856]: I1202 00:23:35.795602 4856 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01-socket-dir\") pod \"default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt\" (UID: \"44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt" Dec 02 00:23:35 crc kubenswrapper[4856]: I1202 00:23:35.795691 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt\" (UID: \"44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt" Dec 02 00:23:35 crc kubenswrapper[4856]: I1202 00:23:35.796197 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01-socket-dir\") pod \"default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt\" (UID: \"44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt" Dec 02 00:23:35 crc kubenswrapper[4856]: I1202 00:23:35.796517 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01-sg-core-config\") pod \"default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt\" (UID: \"44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt" Dec 02 00:23:35 crc kubenswrapper[4856]: I1202 00:23:35.811601 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49pqs\" (UniqueName: \"kubernetes.io/projected/44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01-kube-api-access-49pqs\") pod \"default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt\" (UID: \"44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt" Dec 02 00:23:35 crc kubenswrapper[4856]: I1202 00:23:35.812517 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt\" (UID: \"44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt" Dec 02 00:23:35 crc kubenswrapper[4856]: I1202 00:23:35.869694 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt" Dec 02 00:23:37 crc kubenswrapper[4856]: I1202 00:23:37.329896 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4"] Dec 02 00:23:37 crc kubenswrapper[4856]: I1202 00:23:37.331032 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4" Dec 02 00:23:37 crc kubenswrapper[4856]: I1202 00:23:37.337002 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-ceil-event-sg-core-configmap" Dec 02 00:23:37 crc kubenswrapper[4856]: I1202 00:23:37.341345 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4"] Dec 02 00:23:37 crc kubenswrapper[4856]: I1202 00:23:37.420449 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ccrn\" (UniqueName: \"kubernetes.io/projected/ef80c4b0-49ad-4ed2-a628-007510f56965-kube-api-access-2ccrn\") pod \"default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4\" (UID: \"ef80c4b0-49ad-4ed2-a628-007510f56965\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4" Dec 02 00:23:37 crc kubenswrapper[4856]: I1202 00:23:37.420610 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/ef80c4b0-49ad-4ed2-a628-007510f56965-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4\" (UID: \"ef80c4b0-49ad-4ed2-a628-007510f56965\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4" Dec 02 00:23:37 crc kubenswrapper[4856]: I1202 00:23:37.420649 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/ef80c4b0-49ad-4ed2-a628-007510f56965-socket-dir\") pod \"default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4\" (UID: \"ef80c4b0-49ad-4ed2-a628-007510f56965\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4" Dec 02 00:23:37 crc kubenswrapper[4856]: I1202 00:23:37.420668 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/ef80c4b0-49ad-4ed2-a628-007510f56965-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4\" (UID: \"ef80c4b0-49ad-4ed2-a628-007510f56965\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4" Dec 02 00:23:37 crc kubenswrapper[4856]: I1202 00:23:37.521681 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/ef80c4b0-49ad-4ed2-a628-007510f56965-socket-dir\") pod \"default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4\" (UID: \"ef80c4b0-49ad-4ed2-a628-007510f56965\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4" Dec 02 00:23:37 crc kubenswrapper[4856]: I1202 00:23:37.521715 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/ef80c4b0-49ad-4ed2-a628-007510f56965-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4\" (UID: \"ef80c4b0-49ad-4ed2-a628-007510f56965\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4" Dec 02 00:23:37 crc kubenswrapper[4856]: I1202 00:23:37.521755 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ccrn\" (UniqueName: \"kubernetes.io/projected/ef80c4b0-49ad-4ed2-a628-007510f56965-kube-api-access-2ccrn\") pod 
\"default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4\" (UID: \"ef80c4b0-49ad-4ed2-a628-007510f56965\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4" Dec 02 00:23:37 crc kubenswrapper[4856]: I1202 00:23:37.521830 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/ef80c4b0-49ad-4ed2-a628-007510f56965-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4\" (UID: \"ef80c4b0-49ad-4ed2-a628-007510f56965\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4" Dec 02 00:23:37 crc kubenswrapper[4856]: I1202 00:23:37.522133 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/ef80c4b0-49ad-4ed2-a628-007510f56965-socket-dir\") pod \"default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4\" (UID: \"ef80c4b0-49ad-4ed2-a628-007510f56965\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4" Dec 02 00:23:37 crc kubenswrapper[4856]: I1202 00:23:37.522554 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/ef80c4b0-49ad-4ed2-a628-007510f56965-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4\" (UID: \"ef80c4b0-49ad-4ed2-a628-007510f56965\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4" Dec 02 00:23:37 crc kubenswrapper[4856]: I1202 00:23:37.526844 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/ef80c4b0-49ad-4ed2-a628-007510f56965-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4\" (UID: \"ef80c4b0-49ad-4ed2-a628-007510f56965\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4" Dec 02 00:23:37 crc kubenswrapper[4856]: I1202 00:23:37.541531 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ccrn\" (UniqueName: \"kubernetes.io/projected/ef80c4b0-49ad-4ed2-a628-007510f56965-kube-api-access-2ccrn\") pod \"default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4\" (UID: \"ef80c4b0-49ad-4ed2-a628-007510f56965\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4" Dec 02 00:23:37 crc kubenswrapper[4856]: I1202 00:23:37.655550 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4" Dec 02 00:23:38 crc kubenswrapper[4856]: I1202 00:23:38.853149 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt"] Dec 02 00:23:38 crc kubenswrapper[4856]: I1202 00:23:38.926545 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4"] Dec 02 00:23:38 crc kubenswrapper[4856]: W1202 00:23:38.933127 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podef80c4b0_49ad_4ed2_a628_007510f56965.slice/crio-349a24746639d808dc8e825f788e3b7204aa92c8905d9ddf9f1e7ca94f072c86 WatchSource:0}: Error finding container 349a24746639d808dc8e825f788e3b7204aa92c8905d9ddf9f1e7ca94f072c86: Status 404 returned error can't find the container with id 349a24746639d808dc8e825f788e3b7204aa92c8905d9ddf9f1e7ca94f072c86 Dec 02 00:23:39 crc kubenswrapper[4856]: I1202 00:23:39.134204 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/prometheus-default-0" Dec 02 00:23:39 crc kubenswrapper[4856]: I1202 00:23:39.176868 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/prometheus-default-0" Dec 02 00:23:39 crc kubenswrapper[4856]: I1202 00:23:39.321338 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" event={"ID":"16a2092a-9a92-431a-9cb4-2c760fb5b5f6","Type":"ContainerStarted","Data":"203de7c1fb1c0f8618ae0aad693b86346c0be39305b4527e4bf552d7fd9ed71c"} Dec 02 00:23:39 crc kubenswrapper[4856]: I1202 00:23:39.323374 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" event={"ID":"f8c47b36-32a8-4485-b337-3b92535e0875","Type":"ContainerStarted","Data":"f6db94a39e6c7e39edb8cd4eec656af1561209c4b151cddbbef748d01e391d0f"} Dec 02 00:23:39 crc kubenswrapper[4856]: I1202 00:23:39.325401 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" event={"ID":"7896ff15-1225-4a83-898d-3c9166555c2c","Type":"ContainerStarted","Data":"08b6327e496023dfc9df64a206a8c5a87bda257f05ce15388d4f79c4708808c1"} Dec 02 00:23:39 crc kubenswrapper[4856]: I1202 00:23:39.326779 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt" event={"ID":"44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01","Type":"ContainerStarted","Data":"ad2f6752b36d892df8073755c618eefee428a01e4edd94621247b3d1a299e5f6"} Dec 02 00:23:39 crc kubenswrapper[4856]: I1202 00:23:39.328723 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4" event={"ID":"ef80c4b0-49ad-4ed2-a628-007510f56965","Type":"ContainerStarted","Data":"349a24746639d808dc8e825f788e3b7204aa92c8905d9ddf9f1e7ca94f072c86"} Dec 02 00:23:39 crc kubenswrapper[4856]: I1202 00:23:39.364526 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/prometheus-default-0" Dec 02 00:23:40 crc kubenswrapper[4856]: I1202 00:23:40.338324 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt" 
event={"ID":"44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01","Type":"ContainerStarted","Data":"468a4ee7fcff2553ca6a50e3fbda9d6f27f565f4522e87df02d10a5a7a4fbe5d"} Dec 02 00:23:40 crc kubenswrapper[4856]: I1202 00:23:40.340244 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4" event={"ID":"ef80c4b0-49ad-4ed2-a628-007510f56965","Type":"ContainerStarted","Data":"f1b3719983136c7f4ad4f3c562e1a11d8e18de3af0390070729212d931e627f8"} Dec 02 00:23:46 crc kubenswrapper[4856]: I1202 00:23:46.427947 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt" event={"ID":"44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01","Type":"ContainerStarted","Data":"8e2a45c53043c432071cae25c9f9616c862cbe13d68ce5039f9be6fcac6ec00b"} Dec 02 00:23:46 crc kubenswrapper[4856]: I1202 00:23:46.431030 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4" event={"ID":"ef80c4b0-49ad-4ed2-a628-007510f56965","Type":"ContainerStarted","Data":"f95b9a4abf3c34339655275affff21c38f354630c4d31fb4cb521e5de241c154"} Dec 02 00:23:46 crc kubenswrapper[4856]: I1202 00:23:46.437359 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" event={"ID":"16a2092a-9a92-431a-9cb4-2c760fb5b5f6","Type":"ContainerStarted","Data":"e8cdbcdf90fbf356f7dbcb2b505addaa609995f6fdc7cb2674901149f12e62d5"} Dec 02 00:23:46 crc kubenswrapper[4856]: I1202 00:23:46.439537 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" event={"ID":"f8c47b36-32a8-4485-b337-3b92535e0875","Type":"ContainerStarted","Data":"0d2a80443f633338aa64fb148154e64b29c7ff8371abdb74b8a51f24e2f25e07"} Dec 02 00:23:46 crc kubenswrapper[4856]: I1202 00:23:46.441919 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" event={"ID":"7896ff15-1225-4a83-898d-3c9166555c2c","Type":"ContainerStarted","Data":"434cf272896fa037bb632b7d4ce881dd041d8b42a3d6c1d88bff04b723103e71"} Dec 02 00:23:46 crc kubenswrapper[4856]: I1202 00:23:46.460088 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt" podStartSLOduration=4.664695468 podStartE2EDuration="11.460069458s" podCreationTimestamp="2025-12-02 00:23:35 +0000 UTC" firstStartedPulling="2025-12-02 00:23:38.883488006 +0000 UTC m=+1045.909856010" lastFinishedPulling="2025-12-02 00:23:45.678861996 +0000 UTC m=+1052.705230000" observedRunningTime="2025-12-02 00:23:46.456306885 +0000 UTC m=+1053.482674909" watchObservedRunningTime="2025-12-02 00:23:46.460069458 +0000 UTC m=+1053.486437462" Dec 02 00:23:46 crc kubenswrapper[4856]: I1202 00:23:46.523070 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" podStartSLOduration=5.496693047 podStartE2EDuration="20.523051496s" podCreationTimestamp="2025-12-02 00:23:26 +0000 UTC" firstStartedPulling="2025-12-02 00:23:30.592633856 +0000 UTC m=+1037.619001870" lastFinishedPulling="2025-12-02 00:23:45.618992315 +0000 UTC m=+1052.645360319" observedRunningTime="2025-12-02 00:23:46.520547574 +0000 UTC m=+1053.546915578" watchObservedRunningTime="2025-12-02 00:23:46.523051496 +0000 
UTC m=+1053.549419500" Dec 02 00:23:46 crc kubenswrapper[4856]: I1202 00:23:46.525691 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4" podStartSLOduration=2.872147378 podStartE2EDuration="9.52567792s" podCreationTimestamp="2025-12-02 00:23:37 +0000 UTC" firstStartedPulling="2025-12-02 00:23:38.943040369 +0000 UTC m=+1045.969408373" lastFinishedPulling="2025-12-02 00:23:45.596570911 +0000 UTC m=+1052.622938915" observedRunningTime="2025-12-02 00:23:46.480989245 +0000 UTC m=+1053.507357269" watchObservedRunningTime="2025-12-02 00:23:46.52567792 +0000 UTC m=+1053.552045924" Dec 02 00:23:46 crc kubenswrapper[4856]: I1202 00:23:46.631173 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" podStartSLOduration=3.609408227 podStartE2EDuration="27.631125029s" podCreationTimestamp="2025-12-02 00:23:19 +0000 UTC" firstStartedPulling="2025-12-02 00:23:21.658899797 +0000 UTC m=+1028.685267821" lastFinishedPulling="2025-12-02 00:23:45.680616619 +0000 UTC m=+1052.706984623" observedRunningTime="2025-12-02 00:23:46.628021062 +0000 UTC m=+1053.654389066" watchObservedRunningTime="2025-12-02 00:23:46.631125029 +0000 UTC m=+1053.657493033" Dec 02 00:23:46 crc kubenswrapper[4856]: I1202 00:23:46.632337 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" podStartSLOduration=8.448209692 podStartE2EDuration="23.632330808s" podCreationTimestamp="2025-12-02 00:23:23 +0000 UTC" firstStartedPulling="2025-12-02 00:23:30.447786839 +0000 UTC m=+1037.474154843" lastFinishedPulling="2025-12-02 00:23:45.631907955 +0000 UTC m=+1052.658275959" observedRunningTime="2025-12-02 00:23:46.577923363 +0000 UTC m=+1053.604291377" watchObservedRunningTime="2025-12-02 00:23:46.632330808 +0000 UTC m=+1053.658698812" Dec 02 00:23:49 crc kubenswrapper[4856]: I1202 00:23:49.078605 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-hmpjc"] Dec 02 00:23:49 crc kubenswrapper[4856]: I1202 00:23:49.079061 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" podUID="1a61e765-280e-415c-96a5-797fc8a3d133" containerName="default-interconnect" containerID="cri-o://c40c3c7978c9efbb6efb68f09fd6563ab73b3fe41db7d4ad08511933933db714" gracePeriod=30 Dec 02 00:23:49 crc kubenswrapper[4856]: I1202 00:23:49.474048 4856 generic.go:334] "Generic (PLEG): container finished" podID="1a61e765-280e-415c-96a5-797fc8a3d133" containerID="c40c3c7978c9efbb6efb68f09fd6563ab73b3fe41db7d4ad08511933933db714" exitCode=0 Dec 02 00:23:49 crc kubenswrapper[4856]: I1202 00:23:49.474398 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" event={"ID":"1a61e765-280e-415c-96a5-797fc8a3d133","Type":"ContainerDied","Data":"c40c3c7978c9efbb6efb68f09fd6563ab73b3fe41db7d4ad08511933933db714"} Dec 02 00:23:49 crc kubenswrapper[4856]: I1202 00:23:49.567971 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" Dec 02 00:23:49 crc kubenswrapper[4856]: I1202 00:23:49.624706 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-sasl-users\") pod \"1a61e765-280e-415c-96a5-797fc8a3d133\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " Dec 02 00:23:49 crc kubenswrapper[4856]: I1202 00:23:49.624762 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-default-interconnect-openstack-ca\") pod \"1a61e765-280e-415c-96a5-797fc8a3d133\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " Dec 02 00:23:49 crc kubenswrapper[4856]: I1202 00:23:49.624793 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lcfqk\" (UniqueName: \"kubernetes.io/projected/1a61e765-280e-415c-96a5-797fc8a3d133-kube-api-access-lcfqk\") pod \"1a61e765-280e-415c-96a5-797fc8a3d133\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " Dec 02 00:23:49 crc kubenswrapper[4856]: I1202 00:23:49.624820 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/1a61e765-280e-415c-96a5-797fc8a3d133-sasl-config\") pod \"1a61e765-280e-415c-96a5-797fc8a3d133\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " Dec 02 00:23:49 crc kubenswrapper[4856]: I1202 00:23:49.624852 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-default-interconnect-inter-router-credentials\") pod \"1a61e765-280e-415c-96a5-797fc8a3d133\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " Dec 02 00:23:49 crc kubenswrapper[4856]: I1202 00:23:49.624891 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-default-interconnect-inter-router-ca\") pod \"1a61e765-280e-415c-96a5-797fc8a3d133\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " Dec 02 00:23:49 crc kubenswrapper[4856]: I1202 00:23:49.624931 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-default-interconnect-openstack-credentials\") pod \"1a61e765-280e-415c-96a5-797fc8a3d133\" (UID: \"1a61e765-280e-415c-96a5-797fc8a3d133\") " Dec 02 00:23:49 crc kubenswrapper[4856]: I1202 00:23:49.625508 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a61e765-280e-415c-96a5-797fc8a3d133-sasl-config" (OuterVolumeSpecName: "sasl-config") pod "1a61e765-280e-415c-96a5-797fc8a3d133" (UID: "1a61e765-280e-415c-96a5-797fc8a3d133"). InnerVolumeSpecName "sasl-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:23:49 crc kubenswrapper[4856]: I1202 00:23:49.629542 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-sasl-users" (OuterVolumeSpecName: "sasl-users") pod "1a61e765-280e-415c-96a5-797fc8a3d133" (UID: "1a61e765-280e-415c-96a5-797fc8a3d133"). InnerVolumeSpecName "sasl-users". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:23:49 crc kubenswrapper[4856]: I1202 00:23:49.629978 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-default-interconnect-openstack-credentials" (OuterVolumeSpecName: "default-interconnect-openstack-credentials") pod "1a61e765-280e-415c-96a5-797fc8a3d133" (UID: "1a61e765-280e-415c-96a5-797fc8a3d133"). InnerVolumeSpecName "default-interconnect-openstack-credentials". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:23:49 crc kubenswrapper[4856]: I1202 00:23:49.630309 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-default-interconnect-openstack-ca" (OuterVolumeSpecName: "default-interconnect-openstack-ca") pod "1a61e765-280e-415c-96a5-797fc8a3d133" (UID: "1a61e765-280e-415c-96a5-797fc8a3d133"). InnerVolumeSpecName "default-interconnect-openstack-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:23:49 crc kubenswrapper[4856]: I1202 00:23:49.630994 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a61e765-280e-415c-96a5-797fc8a3d133-kube-api-access-lcfqk" (OuterVolumeSpecName: "kube-api-access-lcfqk") pod "1a61e765-280e-415c-96a5-797fc8a3d133" (UID: "1a61e765-280e-415c-96a5-797fc8a3d133"). InnerVolumeSpecName "kube-api-access-lcfqk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:23:49 crc kubenswrapper[4856]: I1202 00:23:49.631853 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-default-interconnect-inter-router-ca" (OuterVolumeSpecName: "default-interconnect-inter-router-ca") pod "1a61e765-280e-415c-96a5-797fc8a3d133" (UID: "1a61e765-280e-415c-96a5-797fc8a3d133"). InnerVolumeSpecName "default-interconnect-inter-router-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:23:49 crc kubenswrapper[4856]: I1202 00:23:49.634949 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-default-interconnect-inter-router-credentials" (OuterVolumeSpecName: "default-interconnect-inter-router-credentials") pod "1a61e765-280e-415c-96a5-797fc8a3d133" (UID: "1a61e765-280e-415c-96a5-797fc8a3d133"). InnerVolumeSpecName "default-interconnect-inter-router-credentials". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:23:49 crc kubenswrapper[4856]: I1202 00:23:49.726659 4856 reconciler_common.go:293] "Volume detached for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-sasl-users\") on node \"crc\" DevicePath \"\"" Dec 02 00:23:49 crc kubenswrapper[4856]: I1202 00:23:49.726898 4856 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-default-interconnect-openstack-ca\") on node \"crc\" DevicePath \"\"" Dec 02 00:23:49 crc kubenswrapper[4856]: I1202 00:23:49.726966 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lcfqk\" (UniqueName: \"kubernetes.io/projected/1a61e765-280e-415c-96a5-797fc8a3d133-kube-api-access-lcfqk\") on node \"crc\" DevicePath \"\"" Dec 02 00:23:49 crc kubenswrapper[4856]: I1202 00:23:49.727058 4856 reconciler_common.go:293] "Volume detached for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/1a61e765-280e-415c-96a5-797fc8a3d133-sasl-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:23:49 crc kubenswrapper[4856]: I1202 00:23:49.727122 4856 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-default-interconnect-inter-router-credentials\") on node \"crc\" DevicePath \"\"" Dec 02 00:23:49 crc kubenswrapper[4856]: I1202 00:23:49.727186 4856 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-default-interconnect-inter-router-ca\") on node \"crc\" DevicePath \"\"" Dec 02 00:23:49 crc kubenswrapper[4856]: I1202 00:23:49.727240 4856 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/1a61e765-280e-415c-96a5-797fc8a3d133-default-interconnect-openstack-credentials\") on node \"crc\" DevicePath \"\"" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.327492 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-lfdgc"] Dec 02 00:23:50 crc kubenswrapper[4856]: E1202 00:23:50.327770 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a61e765-280e-415c-96a5-797fc8a3d133" containerName="default-interconnect" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.327782 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a61e765-280e-415c-96a5-797fc8a3d133" containerName="default-interconnect" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.327905 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a61e765-280e-415c-96a5-797fc8a3d133" containerName="default-interconnect" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.328370 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-lfdgc" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.345882 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-lfdgc"] Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.438673 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/2d08f322-2d31-4b5b-a42a-edcdb7a4da4d-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-lfdgc\" (UID: \"2d08f322-2d31-4b5b-a42a-edcdb7a4da4d\") " pod="service-telemetry/default-interconnect-68864d46cb-lfdgc" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.438747 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/2d08f322-2d31-4b5b-a42a-edcdb7a4da4d-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-lfdgc\" (UID: \"2d08f322-2d31-4b5b-a42a-edcdb7a4da4d\") " pod="service-telemetry/default-interconnect-68864d46cb-lfdgc" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.438821 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/2d08f322-2d31-4b5b-a42a-edcdb7a4da4d-sasl-users\") pod \"default-interconnect-68864d46cb-lfdgc\" (UID: \"2d08f322-2d31-4b5b-a42a-edcdb7a4da4d\") " pod="service-telemetry/default-interconnect-68864d46cb-lfdgc" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.438854 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/2d08f322-2d31-4b5b-a42a-edcdb7a4da4d-sasl-config\") pod \"default-interconnect-68864d46cb-lfdgc\" (UID: \"2d08f322-2d31-4b5b-a42a-edcdb7a4da4d\") " pod="service-telemetry/default-interconnect-68864d46cb-lfdgc" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.438877 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r64sw\" (UniqueName: \"kubernetes.io/projected/2d08f322-2d31-4b5b-a42a-edcdb7a4da4d-kube-api-access-r64sw\") pod \"default-interconnect-68864d46cb-lfdgc\" (UID: \"2d08f322-2d31-4b5b-a42a-edcdb7a4da4d\") " pod="service-telemetry/default-interconnect-68864d46cb-lfdgc" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.438953 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/2d08f322-2d31-4b5b-a42a-edcdb7a4da4d-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-lfdgc\" (UID: \"2d08f322-2d31-4b5b-a42a-edcdb7a4da4d\") " pod="service-telemetry/default-interconnect-68864d46cb-lfdgc" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.438981 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/2d08f322-2d31-4b5b-a42a-edcdb7a4da4d-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-lfdgc\" (UID: \"2d08f322-2d31-4b5b-a42a-edcdb7a4da4d\") " pod="service-telemetry/default-interconnect-68864d46cb-lfdgc" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.482684 
4856 generic.go:334] "Generic (PLEG): container finished" podID="f8c47b36-32a8-4485-b337-3b92535e0875" containerID="f6db94a39e6c7e39edb8cd4eec656af1561209c4b151cddbbef748d01e391d0f" exitCode=0 Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.482752 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" event={"ID":"f8c47b36-32a8-4485-b337-3b92535e0875","Type":"ContainerDied","Data":"f6db94a39e6c7e39edb8cd4eec656af1561209c4b151cddbbef748d01e391d0f"} Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.483240 4856 scope.go:117] "RemoveContainer" containerID="f6db94a39e6c7e39edb8cd4eec656af1561209c4b151cddbbef748d01e391d0f" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.483969 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" event={"ID":"1a61e765-280e-415c-96a5-797fc8a3d133","Type":"ContainerDied","Data":"38f199763791e6cd9af5a7ef4b6500b254ef5c2520ca1eaec0f46f3c8ba0b79b"} Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.483998 4856 scope.go:117] "RemoveContainer" containerID="c40c3c7978c9efbb6efb68f09fd6563ab73b3fe41db7d4ad08511933933db714" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.484096 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-hmpjc" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.488202 4856 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.490330 4856 generic.go:334] "Generic (PLEG): container finished" podID="7896ff15-1225-4a83-898d-3c9166555c2c" containerID="08b6327e496023dfc9df64a206a8c5a87bda257f05ce15388d4f79c4708808c1" exitCode=0 Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.490372 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" event={"ID":"7896ff15-1225-4a83-898d-3c9166555c2c","Type":"ContainerDied","Data":"08b6327e496023dfc9df64a206a8c5a87bda257f05ce15388d4f79c4708808c1"} Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.490967 4856 scope.go:117] "RemoveContainer" containerID="08b6327e496023dfc9df64a206a8c5a87bda257f05ce15388d4f79c4708808c1" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.492312 4856 generic.go:334] "Generic (PLEG): container finished" podID="44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01" containerID="468a4ee7fcff2553ca6a50e3fbda9d6f27f565f4522e87df02d10a5a7a4fbe5d" exitCode=0 Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.492361 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt" event={"ID":"44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01","Type":"ContainerDied","Data":"468a4ee7fcff2553ca6a50e3fbda9d6f27f565f4522e87df02d10a5a7a4fbe5d"} Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.492652 4856 scope.go:117] "RemoveContainer" containerID="468a4ee7fcff2553ca6a50e3fbda9d6f27f565f4522e87df02d10a5a7a4fbe5d" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.495195 4856 generic.go:334] "Generic (PLEG): container finished" podID="ef80c4b0-49ad-4ed2-a628-007510f56965" containerID="f1b3719983136c7f4ad4f3c562e1a11d8e18de3af0390070729212d931e627f8" exitCode=0 Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.495247 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4" event={"ID":"ef80c4b0-49ad-4ed2-a628-007510f56965","Type":"ContainerDied","Data":"f1b3719983136c7f4ad4f3c562e1a11d8e18de3af0390070729212d931e627f8"} Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.495652 4856 scope.go:117] "RemoveContainer" containerID="f1b3719983136c7f4ad4f3c562e1a11d8e18de3af0390070729212d931e627f8" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.502692 4856 generic.go:334] "Generic (PLEG): container finished" podID="16a2092a-9a92-431a-9cb4-2c760fb5b5f6" containerID="203de7c1fb1c0f8618ae0aad693b86346c0be39305b4527e4bf552d7fd9ed71c" exitCode=0 Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.502742 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" event={"ID":"16a2092a-9a92-431a-9cb4-2c760fb5b5f6","Type":"ContainerDied","Data":"203de7c1fb1c0f8618ae0aad693b86346c0be39305b4527e4bf552d7fd9ed71c"} Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.503226 4856 scope.go:117] "RemoveContainer" containerID="203de7c1fb1c0f8618ae0aad693b86346c0be39305b4527e4bf552d7fd9ed71c" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.540452 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/2d08f322-2d31-4b5b-a42a-edcdb7a4da4d-sasl-config\") pod \"default-interconnect-68864d46cb-lfdgc\" (UID: \"2d08f322-2d31-4b5b-a42a-edcdb7a4da4d\") " pod="service-telemetry/default-interconnect-68864d46cb-lfdgc" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.540508 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r64sw\" (UniqueName: \"kubernetes.io/projected/2d08f322-2d31-4b5b-a42a-edcdb7a4da4d-kube-api-access-r64sw\") pod \"default-interconnect-68864d46cb-lfdgc\" (UID: \"2d08f322-2d31-4b5b-a42a-edcdb7a4da4d\") " pod="service-telemetry/default-interconnect-68864d46cb-lfdgc" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.540561 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/2d08f322-2d31-4b5b-a42a-edcdb7a4da4d-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-lfdgc\" (UID: \"2d08f322-2d31-4b5b-a42a-edcdb7a4da4d\") " pod="service-telemetry/default-interconnect-68864d46cb-lfdgc" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.541405 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/2d08f322-2d31-4b5b-a42a-edcdb7a4da4d-sasl-config\") pod \"default-interconnect-68864d46cb-lfdgc\" (UID: \"2d08f322-2d31-4b5b-a42a-edcdb7a4da4d\") " pod="service-telemetry/default-interconnect-68864d46cb-lfdgc" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.541693 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/2d08f322-2d31-4b5b-a42a-edcdb7a4da4d-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-lfdgc\" (UID: \"2d08f322-2d31-4b5b-a42a-edcdb7a4da4d\") " pod="service-telemetry/default-interconnect-68864d46cb-lfdgc" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.541963 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: 
\"kubernetes.io/secret/2d08f322-2d31-4b5b-a42a-edcdb7a4da4d-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-lfdgc\" (UID: \"2d08f322-2d31-4b5b-a42a-edcdb7a4da4d\") " pod="service-telemetry/default-interconnect-68864d46cb-lfdgc" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.542003 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/2d08f322-2d31-4b5b-a42a-edcdb7a4da4d-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-lfdgc\" (UID: \"2d08f322-2d31-4b5b-a42a-edcdb7a4da4d\") " pod="service-telemetry/default-interconnect-68864d46cb-lfdgc" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.542117 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/2d08f322-2d31-4b5b-a42a-edcdb7a4da4d-sasl-users\") pod \"default-interconnect-68864d46cb-lfdgc\" (UID: \"2d08f322-2d31-4b5b-a42a-edcdb7a4da4d\") " pod="service-telemetry/default-interconnect-68864d46cb-lfdgc" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.547641 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/2d08f322-2d31-4b5b-a42a-edcdb7a4da4d-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-lfdgc\" (UID: \"2d08f322-2d31-4b5b-a42a-edcdb7a4da4d\") " pod="service-telemetry/default-interconnect-68864d46cb-lfdgc" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.557489 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/2d08f322-2d31-4b5b-a42a-edcdb7a4da4d-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-lfdgc\" (UID: \"2d08f322-2d31-4b5b-a42a-edcdb7a4da4d\") " pod="service-telemetry/default-interconnect-68864d46cb-lfdgc" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.560029 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/2d08f322-2d31-4b5b-a42a-edcdb7a4da4d-sasl-users\") pod \"default-interconnect-68864d46cb-lfdgc\" (UID: \"2d08f322-2d31-4b5b-a42a-edcdb7a4da4d\") " pod="service-telemetry/default-interconnect-68864d46cb-lfdgc" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.566402 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/2d08f322-2d31-4b5b-a42a-edcdb7a4da4d-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-lfdgc\" (UID: \"2d08f322-2d31-4b5b-a42a-edcdb7a4da4d\") " pod="service-telemetry/default-interconnect-68864d46cb-lfdgc" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.567087 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/2d08f322-2d31-4b5b-a42a-edcdb7a4da4d-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-lfdgc\" (UID: \"2d08f322-2d31-4b5b-a42a-edcdb7a4da4d\") " pod="service-telemetry/default-interconnect-68864d46cb-lfdgc" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.569393 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r64sw\" (UniqueName: 
\"kubernetes.io/projected/2d08f322-2d31-4b5b-a42a-edcdb7a4da4d-kube-api-access-r64sw\") pod \"default-interconnect-68864d46cb-lfdgc\" (UID: \"2d08f322-2d31-4b5b-a42a-edcdb7a4da4d\") " pod="service-telemetry/default-interconnect-68864d46cb-lfdgc" Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.632747 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-hmpjc"] Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.635334 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-hmpjc"] Dec 02 00:23:50 crc kubenswrapper[4856]: I1202 00:23:50.646312 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-lfdgc" Dec 02 00:23:51 crc kubenswrapper[4856]: I1202 00:23:51.132907 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-lfdgc"] Dec 02 00:23:51 crc kubenswrapper[4856]: W1202 00:23:51.137900 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2d08f322_2d31_4b5b_a42a_edcdb7a4da4d.slice/crio-0a4e7ac43378fda68d2547ad435dd6ea24f9a0d27400489032994252f6f69182 WatchSource:0}: Error finding container 0a4e7ac43378fda68d2547ad435dd6ea24f9a0d27400489032994252f6f69182: Status 404 returned error can't find the container with id 0a4e7ac43378fda68d2547ad435dd6ea24f9a0d27400489032994252f6f69182 Dec 02 00:23:51 crc kubenswrapper[4856]: I1202 00:23:51.260198 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a61e765-280e-415c-96a5-797fc8a3d133" path="/var/lib/kubelet/pods/1a61e765-280e-415c-96a5-797fc8a3d133/volumes" Dec 02 00:23:51 crc kubenswrapper[4856]: I1202 00:23:51.510573 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-lfdgc" event={"ID":"2d08f322-2d31-4b5b-a42a-edcdb7a4da4d","Type":"ContainerStarted","Data":"0a4e7ac43378fda68d2547ad435dd6ea24f9a0d27400489032994252f6f69182"} Dec 02 00:23:55 crc kubenswrapper[4856]: I1202 00:23:55.536270 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-lfdgc" event={"ID":"2d08f322-2d31-4b5b-a42a-edcdb7a4da4d","Type":"ContainerStarted","Data":"ac0ddeb3e47e168da0ffb51ea3d1bc25282dad435489e980d8e587f99911e71c"} Dec 02 00:23:55 crc kubenswrapper[4856]: I1202 00:23:55.559835 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-interconnect-68864d46cb-lfdgc" podStartSLOduration=6.559818412 podStartE2EDuration="6.559818412s" podCreationTimestamp="2025-12-02 00:23:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:23:55.554274025 +0000 UTC m=+1062.580642029" watchObservedRunningTime="2025-12-02 00:23:55.559818412 +0000 UTC m=+1062.586186416" Dec 02 00:23:56 crc kubenswrapper[4856]: I1202 00:23:56.244854 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/qdr-test"] Dec 02 00:23:56 crc kubenswrapper[4856]: I1202 00:23:56.245924 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/qdr-test" Dec 02 00:23:56 crc kubenswrapper[4856]: I1202 00:23:56.250249 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"qdr-test-config" Dec 02 00:23:56 crc kubenswrapper[4856]: I1202 00:23:56.251174 4856 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-selfsigned" Dec 02 00:23:56 crc kubenswrapper[4856]: I1202 00:23:56.259836 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/qdr-test"] Dec 02 00:23:56 crc kubenswrapper[4856]: I1202 00:23:56.429088 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-selfsigned-cert\" (UniqueName: \"kubernetes.io/secret/0f7dbe4c-13b8-45ac-987a-1125b1a93c6e-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: \"0f7dbe4c-13b8-45ac-987a-1125b1a93c6e\") " pod="service-telemetry/qdr-test" Dec 02 00:23:56 crc kubenswrapper[4856]: I1202 00:23:56.429139 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lsbt\" (UniqueName: \"kubernetes.io/projected/0f7dbe4c-13b8-45ac-987a-1125b1a93c6e-kube-api-access-9lsbt\") pod \"qdr-test\" (UID: \"0f7dbe4c-13b8-45ac-987a-1125b1a93c6e\") " pod="service-telemetry/qdr-test" Dec 02 00:23:56 crc kubenswrapper[4856]: I1202 00:23:56.429282 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/0f7dbe4c-13b8-45ac-987a-1125b1a93c6e-qdr-test-config\") pod \"qdr-test\" (UID: \"0f7dbe4c-13b8-45ac-987a-1125b1a93c6e\") " pod="service-telemetry/qdr-test" Dec 02 00:23:56 crc kubenswrapper[4856]: I1202 00:23:56.530516 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-selfsigned-cert\" (UniqueName: \"kubernetes.io/secret/0f7dbe4c-13b8-45ac-987a-1125b1a93c6e-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: \"0f7dbe4c-13b8-45ac-987a-1125b1a93c6e\") " pod="service-telemetry/qdr-test" Dec 02 00:23:56 crc kubenswrapper[4856]: I1202 00:23:56.530570 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lsbt\" (UniqueName: \"kubernetes.io/projected/0f7dbe4c-13b8-45ac-987a-1125b1a93c6e-kube-api-access-9lsbt\") pod \"qdr-test\" (UID: \"0f7dbe4c-13b8-45ac-987a-1125b1a93c6e\") " pod="service-telemetry/qdr-test" Dec 02 00:23:56 crc kubenswrapper[4856]: I1202 00:23:56.530629 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/0f7dbe4c-13b8-45ac-987a-1125b1a93c6e-qdr-test-config\") pod \"qdr-test\" (UID: \"0f7dbe4c-13b8-45ac-987a-1125b1a93c6e\") " pod="service-telemetry/qdr-test" Dec 02 00:23:56 crc kubenswrapper[4856]: I1202 00:23:56.531284 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/0f7dbe4c-13b8-45ac-987a-1125b1a93c6e-qdr-test-config\") pod \"qdr-test\" (UID: \"0f7dbe4c-13b8-45ac-987a-1125b1a93c6e\") " pod="service-telemetry/qdr-test" Dec 02 00:23:56 crc kubenswrapper[4856]: I1202 00:23:56.535990 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-selfsigned-cert\" (UniqueName: \"kubernetes.io/secret/0f7dbe4c-13b8-45ac-987a-1125b1a93c6e-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: 
\"0f7dbe4c-13b8-45ac-987a-1125b1a93c6e\") " pod="service-telemetry/qdr-test" Dec 02 00:23:56 crc kubenswrapper[4856]: I1202 00:23:56.542643 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" event={"ID":"7896ff15-1225-4a83-898d-3c9166555c2c","Type":"ContainerStarted","Data":"d185a0b4f9651f487a083622df8ae2c42317b11aa9c6ecbb3ded4341a4ff07c0"} Dec 02 00:23:56 crc kubenswrapper[4856]: I1202 00:23:56.544252 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt" event={"ID":"44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01","Type":"ContainerStarted","Data":"d6e6d53972a98b36f2baddac9c4c41ddeeda26c55135950d90bc245600fcbff6"} Dec 02 00:23:56 crc kubenswrapper[4856]: I1202 00:23:56.547195 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4" event={"ID":"ef80c4b0-49ad-4ed2-a628-007510f56965","Type":"ContainerStarted","Data":"ded986489d581382a271c3993d2bdb20d5add9b94721645f66019bbc9e9316d4"} Dec 02 00:23:56 crc kubenswrapper[4856]: I1202 00:23:56.548081 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lsbt\" (UniqueName: \"kubernetes.io/projected/0f7dbe4c-13b8-45ac-987a-1125b1a93c6e-kube-api-access-9lsbt\") pod \"qdr-test\" (UID: \"0f7dbe4c-13b8-45ac-987a-1125b1a93c6e\") " pod="service-telemetry/qdr-test" Dec 02 00:23:56 crc kubenswrapper[4856]: I1202 00:23:56.549651 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" event={"ID":"16a2092a-9a92-431a-9cb4-2c760fb5b5f6","Type":"ContainerStarted","Data":"c4e90df1447d27d994135fc6b6d5e6cb2f4f6d872ca453b7092532f26e55875c"} Dec 02 00:23:56 crc kubenswrapper[4856]: I1202 00:23:56.551856 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" event={"ID":"f8c47b36-32a8-4485-b337-3b92535e0875","Type":"ContainerStarted","Data":"551b71f6dc041ce0a2d43dbcceafb3665c9a96488a8bad61efd8b37cefc896fb"} Dec 02 00:23:56 crc kubenswrapper[4856]: I1202 00:23:56.558532 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/qdr-test" Dec 02 00:23:56 crc kubenswrapper[4856]: I1202 00:23:56.886683 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/qdr-test"] Dec 02 00:23:57 crc kubenswrapper[4856]: I1202 00:23:57.570503 4856 generic.go:334] "Generic (PLEG): container finished" podID="16a2092a-9a92-431a-9cb4-2c760fb5b5f6" containerID="c4e90df1447d27d994135fc6b6d5e6cb2f4f6d872ca453b7092532f26e55875c" exitCode=0 Dec 02 00:23:57 crc kubenswrapper[4856]: I1202 00:23:57.570563 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" event={"ID":"16a2092a-9a92-431a-9cb4-2c760fb5b5f6","Type":"ContainerDied","Data":"c4e90df1447d27d994135fc6b6d5e6cb2f4f6d872ca453b7092532f26e55875c"} Dec 02 00:23:57 crc kubenswrapper[4856]: I1202 00:23:57.571201 4856 scope.go:117] "RemoveContainer" containerID="203de7c1fb1c0f8618ae0aad693b86346c0be39305b4527e4bf552d7fd9ed71c" Dec 02 00:23:57 crc kubenswrapper[4856]: I1202 00:23:57.571902 4856 scope.go:117] "RemoveContainer" containerID="c4e90df1447d27d994135fc6b6d5e6cb2f4f6d872ca453b7092532f26e55875c" Dec 02 00:23:57 crc kubenswrapper[4856]: E1202 00:23:57.572160 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg_service-telemetry(16a2092a-9a92-431a-9cb4-2c760fb5b5f6)\"" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" podUID="16a2092a-9a92-431a-9cb4-2c760fb5b5f6" Dec 02 00:23:57 crc kubenswrapper[4856]: I1202 00:23:57.581848 4856 generic.go:334] "Generic (PLEG): container finished" podID="f8c47b36-32a8-4485-b337-3b92535e0875" containerID="551b71f6dc041ce0a2d43dbcceafb3665c9a96488a8bad61efd8b37cefc896fb" exitCode=0 Dec 02 00:23:57 crc kubenswrapper[4856]: I1202 00:23:57.581958 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" event={"ID":"f8c47b36-32a8-4485-b337-3b92535e0875","Type":"ContainerDied","Data":"551b71f6dc041ce0a2d43dbcceafb3665c9a96488a8bad61efd8b37cefc896fb"} Dec 02 00:23:57 crc kubenswrapper[4856]: I1202 00:23:57.582525 4856 scope.go:117] "RemoveContainer" containerID="551b71f6dc041ce0a2d43dbcceafb3665c9a96488a8bad61efd8b37cefc896fb" Dec 02 00:23:57 crc kubenswrapper[4856]: E1202 00:23:57.582787 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw_service-telemetry(f8c47b36-32a8-4485-b337-3b92535e0875)\"" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" podUID="f8c47b36-32a8-4485-b337-3b92535e0875" Dec 02 00:23:57 crc kubenswrapper[4856]: I1202 00:23:57.587873 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/qdr-test" event={"ID":"0f7dbe4c-13b8-45ac-987a-1125b1a93c6e","Type":"ContainerStarted","Data":"cab5cc8f1e61c3ab86e11fea3303066db97dd9cae53db0dec2bdaacd29f70ef8"} Dec 02 00:23:57 crc kubenswrapper[4856]: I1202 00:23:57.593120 4856 generic.go:334] "Generic (PLEG): container finished" podID="7896ff15-1225-4a83-898d-3c9166555c2c" containerID="d185a0b4f9651f487a083622df8ae2c42317b11aa9c6ecbb3ded4341a4ff07c0" exitCode=0 Dec 02 00:23:57 crc kubenswrapper[4856]: I1202 00:23:57.593196 4856 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" event={"ID":"7896ff15-1225-4a83-898d-3c9166555c2c","Type":"ContainerDied","Data":"d185a0b4f9651f487a083622df8ae2c42317b11aa9c6ecbb3ded4341a4ff07c0"} Dec 02 00:23:57 crc kubenswrapper[4856]: I1202 00:23:57.593713 4856 scope.go:117] "RemoveContainer" containerID="d185a0b4f9651f487a083622df8ae2c42317b11aa9c6ecbb3ded4341a4ff07c0" Dec 02 00:23:57 crc kubenswrapper[4856]: E1202 00:23:57.593902 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p_service-telemetry(7896ff15-1225-4a83-898d-3c9166555c2c)\"" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" podUID="7896ff15-1225-4a83-898d-3c9166555c2c" Dec 02 00:23:57 crc kubenswrapper[4856]: I1202 00:23:57.597812 4856 generic.go:334] "Generic (PLEG): container finished" podID="44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01" containerID="d6e6d53972a98b36f2baddac9c4c41ddeeda26c55135950d90bc245600fcbff6" exitCode=0 Dec 02 00:23:57 crc kubenswrapper[4856]: I1202 00:23:57.597884 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt" event={"ID":"44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01","Type":"ContainerDied","Data":"d6e6d53972a98b36f2baddac9c4c41ddeeda26c55135950d90bc245600fcbff6"} Dec 02 00:23:57 crc kubenswrapper[4856]: I1202 00:23:57.598430 4856 scope.go:117] "RemoveContainer" containerID="d6e6d53972a98b36f2baddac9c4c41ddeeda26c55135950d90bc245600fcbff6" Dec 02 00:23:57 crc kubenswrapper[4856]: E1202 00:23:57.598721 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt_service-telemetry(44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01)\"" pod="service-telemetry/default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt" podUID="44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01" Dec 02 00:23:57 crc kubenswrapper[4856]: I1202 00:23:57.601809 4856 generic.go:334] "Generic (PLEG): container finished" podID="ef80c4b0-49ad-4ed2-a628-007510f56965" containerID="ded986489d581382a271c3993d2bdb20d5add9b94721645f66019bbc9e9316d4" exitCode=0 Dec 02 00:23:57 crc kubenswrapper[4856]: I1202 00:23:57.601845 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4" event={"ID":"ef80c4b0-49ad-4ed2-a628-007510f56965","Type":"ContainerDied","Data":"ded986489d581382a271c3993d2bdb20d5add9b94721645f66019bbc9e9316d4"} Dec 02 00:23:57 crc kubenswrapper[4856]: I1202 00:23:57.602193 4856 scope.go:117] "RemoveContainer" containerID="ded986489d581382a271c3993d2bdb20d5add9b94721645f66019bbc9e9316d4" Dec 02 00:23:57 crc kubenswrapper[4856]: E1202 00:23:57.602395 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4_service-telemetry(ef80c4b0-49ad-4ed2-a628-007510f56965)\"" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4" podUID="ef80c4b0-49ad-4ed2-a628-007510f56965" Dec 02 00:23:57 crc kubenswrapper[4856]: I1202 00:23:57.636543 4856 
scope.go:117] "RemoveContainer" containerID="f6db94a39e6c7e39edb8cd4eec656af1561209c4b151cddbbef748d01e391d0f" Dec 02 00:23:57 crc kubenswrapper[4856]: I1202 00:23:57.680889 4856 scope.go:117] "RemoveContainer" containerID="08b6327e496023dfc9df64a206a8c5a87bda257f05ce15388d4f79c4708808c1" Dec 02 00:23:57 crc kubenswrapper[4856]: I1202 00:23:57.736779 4856 scope.go:117] "RemoveContainer" containerID="468a4ee7fcff2553ca6a50e3fbda9d6f27f565f4522e87df02d10a5a7a4fbe5d" Dec 02 00:23:57 crc kubenswrapper[4856]: I1202 00:23:57.792797 4856 scope.go:117] "RemoveContainer" containerID="f1b3719983136c7f4ad4f3c562e1a11d8e18de3af0390070729212d931e627f8" Dec 02 00:24:05 crc kubenswrapper[4856]: I1202 00:24:05.687570 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/qdr-test" event={"ID":"0f7dbe4c-13b8-45ac-987a-1125b1a93c6e","Type":"ContainerStarted","Data":"91e3da4df0dba5f71598329b1bf52e57eba2bce03ba667e3453262360a297abd"} Dec 02 00:24:05 crc kubenswrapper[4856]: I1202 00:24:05.708485 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/qdr-test" podStartSLOduration=1.9470485050000002 podStartE2EDuration="9.708448448s" podCreationTimestamp="2025-12-02 00:23:56 +0000 UTC" firstStartedPulling="2025-12-02 00:23:56.895361954 +0000 UTC m=+1063.921729958" lastFinishedPulling="2025-12-02 00:24:04.656761897 +0000 UTC m=+1071.683129901" observedRunningTime="2025-12-02 00:24:05.707194367 +0000 UTC m=+1072.733562371" watchObservedRunningTime="2025-12-02 00:24:05.708448448 +0000 UTC m=+1072.734816452" Dec 02 00:24:05 crc kubenswrapper[4856]: I1202 00:24:05.966202 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/stf-smoketest-smoke1-xbsq7"] Dec 02 00:24:05 crc kubenswrapper[4856]: I1202 00:24:05.967225 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-xbsq7" Dec 02 00:24:05 crc kubenswrapper[4856]: I1202 00:24:05.969279 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-entrypoint-script" Dec 02 00:24:05 crc kubenswrapper[4856]: I1202 00:24:05.971493 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-sensubility-config" Dec 02 00:24:05 crc kubenswrapper[4856]: I1202 00:24:05.971538 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-config" Dec 02 00:24:05 crc kubenswrapper[4856]: I1202 00:24:05.974557 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-entrypoint-script" Dec 02 00:24:05 crc kubenswrapper[4856]: I1202 00:24:05.974635 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-publisher" Dec 02 00:24:05 crc kubenswrapper[4856]: I1202 00:24:05.974647 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-healthcheck-log" Dec 02 00:24:05 crc kubenswrapper[4856]: I1202 00:24:05.979956 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-xbsq7"] Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.072353 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-ceilometer-publisher\") pod \"stf-smoketest-smoke1-xbsq7\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " pod="service-telemetry/stf-smoketest-smoke1-xbsq7" Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.072396 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8k59f\" (UniqueName: \"kubernetes.io/projected/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-kube-api-access-8k59f\") pod \"stf-smoketest-smoke1-xbsq7\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " pod="service-telemetry/stf-smoketest-smoke1-xbsq7" Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.072450 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-xbsq7\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " pod="service-telemetry/stf-smoketest-smoke1-xbsq7" Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.073004 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-xbsq7\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " pod="service-telemetry/stf-smoketest-smoke1-xbsq7" Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.073141 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-healthcheck-log\") pod \"stf-smoketest-smoke1-xbsq7\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " pod="service-telemetry/stf-smoketest-smoke1-xbsq7" Dec 02 00:24:06 crc 
kubenswrapper[4856]: I1202 00:24:06.073255 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-collectd-config\") pod \"stf-smoketest-smoke1-xbsq7\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " pod="service-telemetry/stf-smoketest-smoke1-xbsq7" Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.073390 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-sensubility-config\") pod \"stf-smoketest-smoke1-xbsq7\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " pod="service-telemetry/stf-smoketest-smoke1-xbsq7" Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.174072 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-collectd-config\") pod \"stf-smoketest-smoke1-xbsq7\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " pod="service-telemetry/stf-smoketest-smoke1-xbsq7" Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.174130 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-sensubility-config\") pod \"stf-smoketest-smoke1-xbsq7\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " pod="service-telemetry/stf-smoketest-smoke1-xbsq7" Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.174152 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-ceilometer-publisher\") pod \"stf-smoketest-smoke1-xbsq7\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " pod="service-telemetry/stf-smoketest-smoke1-xbsq7" Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.174172 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8k59f\" (UniqueName: \"kubernetes.io/projected/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-kube-api-access-8k59f\") pod \"stf-smoketest-smoke1-xbsq7\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " pod="service-telemetry/stf-smoketest-smoke1-xbsq7" Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.174213 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-xbsq7\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " pod="service-telemetry/stf-smoketest-smoke1-xbsq7" Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.174246 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-xbsq7\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " pod="service-telemetry/stf-smoketest-smoke1-xbsq7" Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.174271 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-healthcheck-log\") pod \"stf-smoketest-smoke1-xbsq7\" 
(UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " pod="service-telemetry/stf-smoketest-smoke1-xbsq7" Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.175019 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-healthcheck-log\") pod \"stf-smoketest-smoke1-xbsq7\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " pod="service-telemetry/stf-smoketest-smoke1-xbsq7" Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.175017 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-sensubility-config\") pod \"stf-smoketest-smoke1-xbsq7\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " pod="service-telemetry/stf-smoketest-smoke1-xbsq7" Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.175269 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-ceilometer-publisher\") pod \"stf-smoketest-smoke1-xbsq7\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " pod="service-telemetry/stf-smoketest-smoke1-xbsq7" Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.175464 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-xbsq7\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " pod="service-telemetry/stf-smoketest-smoke1-xbsq7" Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.175497 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-collectd-config\") pod \"stf-smoketest-smoke1-xbsq7\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " pod="service-telemetry/stf-smoketest-smoke1-xbsq7" Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.176026 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-xbsq7\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " pod="service-telemetry/stf-smoketest-smoke1-xbsq7" Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.192938 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8k59f\" (UniqueName: \"kubernetes.io/projected/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-kube-api-access-8k59f\") pod \"stf-smoketest-smoke1-xbsq7\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " pod="service-telemetry/stf-smoketest-smoke1-xbsq7" Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.284931 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-xbsq7" Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.289113 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/curl"] Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.289883 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/curl" Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.303177 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/curl"] Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.479798 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gx4np\" (UniqueName: \"kubernetes.io/projected/a8e0ec98-e166-42a0-8768-f99a31f176f1-kube-api-access-gx4np\") pod \"curl\" (UID: \"a8e0ec98-e166-42a0-8768-f99a31f176f1\") " pod="service-telemetry/curl" Dec 02 00:24:06 crc kubenswrapper[4856]: W1202 00:24:06.507180 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc1a5ec0_f15f_4dc5_b9f8_970a43d689cc.slice/crio-45c256854ef8a8184dadfec4d9630f5518925fbe3221d778da3af06ffd727861 WatchSource:0}: Error finding container 45c256854ef8a8184dadfec4d9630f5518925fbe3221d778da3af06ffd727861: Status 404 returned error can't find the container with id 45c256854ef8a8184dadfec4d9630f5518925fbe3221d778da3af06ffd727861 Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.522192 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-xbsq7"] Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.581460 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gx4np\" (UniqueName: \"kubernetes.io/projected/a8e0ec98-e166-42a0-8768-f99a31f176f1-kube-api-access-gx4np\") pod \"curl\" (UID: \"a8e0ec98-e166-42a0-8768-f99a31f176f1\") " pod="service-telemetry/curl" Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.599964 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gx4np\" (UniqueName: \"kubernetes.io/projected/a8e0ec98-e166-42a0-8768-f99a31f176f1-kube-api-access-gx4np\") pod \"curl\" (UID: \"a8e0ec98-e166-42a0-8768-f99a31f176f1\") " pod="service-telemetry/curl" Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.660901 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/curl" Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.695947 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-xbsq7" event={"ID":"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc","Type":"ContainerStarted","Data":"45c256854ef8a8184dadfec4d9630f5518925fbe3221d778da3af06ffd727861"} Dec 02 00:24:06 crc kubenswrapper[4856]: I1202 00:24:06.968423 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/curl"] Dec 02 00:24:06 crc kubenswrapper[4856]: W1202 00:24:06.976149 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda8e0ec98_e166_42a0_8768_f99a31f176f1.slice/crio-275138e35ec37ef1bd8b634b12c4c0ec635367ab31b28620b262734db4fd3549 WatchSource:0}: Error finding container 275138e35ec37ef1bd8b634b12c4c0ec635367ab31b28620b262734db4fd3549: Status 404 returned error can't find the container with id 275138e35ec37ef1bd8b634b12c4c0ec635367ab31b28620b262734db4fd3549 Dec 02 00:24:07 crc kubenswrapper[4856]: I1202 00:24:07.710270 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" event={"ID":"a8e0ec98-e166-42a0-8768-f99a31f176f1","Type":"ContainerStarted","Data":"275138e35ec37ef1bd8b634b12c4c0ec635367ab31b28620b262734db4fd3549"} Dec 02 00:24:09 crc kubenswrapper[4856]: I1202 00:24:09.726866 4856 generic.go:334] "Generic (PLEG): container finished" podID="a8e0ec98-e166-42a0-8768-f99a31f176f1" containerID="f6d14bb53d581570291b56218051dbafcbfe82077418e412b5f55c67b543830b" exitCode=0 Dec 02 00:24:09 crc kubenswrapper[4856]: I1202 00:24:09.727058 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" event={"ID":"a8e0ec98-e166-42a0-8768-f99a31f176f1","Type":"ContainerDied","Data":"f6d14bb53d581570291b56218051dbafcbfe82077418e412b5f55c67b543830b"} Dec 02 00:24:10 crc kubenswrapper[4856]: I1202 00:24:10.252484 4856 scope.go:117] "RemoveContainer" containerID="ded986489d581382a271c3993d2bdb20d5add9b94721645f66019bbc9e9316d4" Dec 02 00:24:11 crc kubenswrapper[4856]: I1202 00:24:11.255808 4856 scope.go:117] "RemoveContainer" containerID="551b71f6dc041ce0a2d43dbcceafb3665c9a96488a8bad61efd8b37cefc896fb" Dec 02 00:24:11 crc kubenswrapper[4856]: I1202 00:24:11.256096 4856 scope.go:117] "RemoveContainer" containerID="d6e6d53972a98b36f2baddac9c4c41ddeeda26c55135950d90bc245600fcbff6" Dec 02 00:24:12 crc kubenswrapper[4856]: I1202 00:24:12.251982 4856 scope.go:117] "RemoveContainer" containerID="d185a0b4f9651f487a083622df8ae2c42317b11aa9c6ecbb3ded4341a4ff07c0" Dec 02 00:24:12 crc kubenswrapper[4856]: I1202 00:24:12.252342 4856 scope.go:117] "RemoveContainer" containerID="c4e90df1447d27d994135fc6b6d5e6cb2f4f6d872ca453b7092532f26e55875c" Dec 02 00:24:13 crc kubenswrapper[4856]: I1202 00:24:13.010795 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/curl" Dec 02 00:24:13 crc kubenswrapper[4856]: I1202 00:24:13.089969 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gx4np\" (UniqueName: \"kubernetes.io/projected/a8e0ec98-e166-42a0-8768-f99a31f176f1-kube-api-access-gx4np\") pod \"a8e0ec98-e166-42a0-8768-f99a31f176f1\" (UID: \"a8e0ec98-e166-42a0-8768-f99a31f176f1\") " Dec 02 00:24:13 crc kubenswrapper[4856]: I1202 00:24:13.115752 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8e0ec98-e166-42a0-8768-f99a31f176f1-kube-api-access-gx4np" (OuterVolumeSpecName: "kube-api-access-gx4np") pod "a8e0ec98-e166-42a0-8768-f99a31f176f1" (UID: "a8e0ec98-e166-42a0-8768-f99a31f176f1"). InnerVolumeSpecName "kube-api-access-gx4np". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:24:13 crc kubenswrapper[4856]: I1202 00:24:13.144789 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_curl_a8e0ec98-e166-42a0-8768-f99a31f176f1/curl/0.log" Dec 02 00:24:13 crc kubenswrapper[4856]: I1202 00:24:13.191832 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gx4np\" (UniqueName: \"kubernetes.io/projected/a8e0ec98-e166-42a0-8768-f99a31f176f1-kube-api-access-gx4np\") on node \"crc\" DevicePath \"\"" Dec 02 00:24:13 crc kubenswrapper[4856]: I1202 00:24:13.431817 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-78bcbbdcff-42chm_df74219e-9434-4ae0-a7d9-61bfdad38211/prometheus-webhook-snmp/0.log" Dec 02 00:24:13 crc kubenswrapper[4856]: I1202 00:24:13.753502 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" event={"ID":"a8e0ec98-e166-42a0-8768-f99a31f176f1","Type":"ContainerDied","Data":"275138e35ec37ef1bd8b634b12c4c0ec635367ab31b28620b262734db4fd3549"} Dec 02 00:24:13 crc kubenswrapper[4856]: I1202 00:24:13.753532 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/curl" Dec 02 00:24:13 crc kubenswrapper[4856]: I1202 00:24:13.753539 4856 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="275138e35ec37ef1bd8b634b12c4c0ec635367ab31b28620b262734db4fd3549" Dec 02 00:24:20 crc kubenswrapper[4856]: E1202 00:24:20.978218 4856 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/tripleomastercentos9/openstack-collectd:current-tripleo" Dec 02 00:24:20 crc kubenswrapper[4856]: E1202 00:24:20.978976 4856 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:smoketest-collectd,Image:quay.io/tripleomastercentos9/openstack-collectd:current-tripleo,Command:[/smoketest_collectd_entrypoint.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CLOUDNAME,Value:smoke1,ValueFrom:nil,},EnvVar{Name:ELASTICSEARCH_AUTH_PASS,Value:fkIBm2kOOtvpovmiWXO4xJKk,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_AUTH_TOKEN,Value:eyJhbGciOiJSUzI1NiIsImtpZCI6InF6SnFxNFFjbVk5VmJQZ2dNMmUxdHFmTlJlVWx4UDhSTlhIamV3RUx4WU0ifQ.eyJhdWQiOlsiaHR0cHM6Ly9rdWJlcm5ldGVzLmRlZmF1bHQuc3ZjIl0sImV4cCI6MTc2NDYzODYzMSwiaWF0IjoxNzY0NjM1MDMxLCJpc3MiOiJodHRwczovL2t1YmVybmV0ZXMuZGVmYXVsdC5zdmMiLCJqdGkiOiIxZDc0YTJlNC0zOWViLTRjMWUtODZhZS1lYjc2Zjk1NzFjZjkiLCJrdWJlcm5ldGVzLmlvIjp7Im5hbWVzcGFjZSI6InNlcnZpY2UtdGVsZW1ldHJ5Iiwic2VydmljZWFjY291bnQiOnsibmFtZSI6InN0Zi1wcm9tZXRoZXVzLXJlYWRlciIsInVpZCI6IjMyZDhmNTJmLWRjNTgtNDg3OS1hYzc2LTlkYWU0ZDMzNDhhYiJ9fSwibmJmIjoxNzY0NjM1MDMxLCJzdWIiOiJzeXN0ZW06c2VydmljZWFjY291bnQ6c2VydmljZS10ZWxlbWV0cnk6c3RmLXByb21ldGhldXMtcmVhZGVyIn0.bpAT6mNVpnzpBBuyPgI-_MCNfikEya9Ghz6ZbALkPgkR38EOoKkfJKSADf-JIK9oBfVYGs_hdCzSrSHfVPpkwAIbsuidqFaj1xllf_JQNArEqM2lR5N_zQ2oBk_QWbqTu9dmNq8nxEzupPmAjl8OXeAgu0PhVvIbx5nrksrS8Hm0Ie82Ot_KsaFYgKj5TSkxWAtOOqChIL0YWe2dYrrPP1cJIx35xpja5usczdf-4xtghIAsZNxgNurHXphYv4Y1E9n27L0pULooKR0MO5vNXgSvPFxrrLW8Lkx3HGcP9gxtDUgSGl1ya-AbE06U0_TbH5zIZTrhs8kPL5Sz9b9SuAWsxRIkkJeu2HGJ_U5H_HZBQue_ug88IuRIsCn0SMXfxHIeWhiBDQ-9_lUgY9G8TT7ugqkyNn6agbevJRSsz8ms_AHEI_-ueAb547lH_uNkS_bskg_DafuzHbWrDgAd6nEmN82rUGkvt4je2omHDyd-9fb1KL0IafUgKjvmJYI6AA0dbf_NuYAjmWiqhQ001Kq5sZ0IOPmt1q5ca8gXkYjZjW0dcUzLEJ9nSL_ByQuBEyj8FDx3zAEFYP3OR60hsMXQ1AW-T1vCL55rUiWvikEZZEaBz24esNaKcfP8zmUKA_F9yiSXPgdpJ0Ow3uwwL0nrRxMa9yMxv4XXy0-6Ll4,ValueFrom:nil,},EnvVar{Name:OBSERVABILITY_STRATEGY,Value:<>,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:collectd-config,ReadOnly:false,MountPath:/etc/minimal-collectd.conf.template,SubPath:minimal-collectd.conf.template,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:sensubility-config,ReadOnly:false,MountPath:/etc/collectd-sensubility.conf,SubPath:collectd-sensubility.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:healthcheck-log,ReadOnly:false,MountPath:/healthcheck.log,SubPath:healthcheck.log,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:collectd-entrypoint-script,ReadOnly:false,MountPath:/smoketest_collectd_entrypoint.sh,SubPath:smoketest_collectd_entrypoint.sh,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8k59f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-
log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod stf-smoketest-smoke1-xbsq7_service-telemetry(fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 00:24:21 crc kubenswrapper[4856]: I1202 00:24:21.828421 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg" event={"ID":"16a2092a-9a92-431a-9cb4-2c760fb5b5f6","Type":"ContainerStarted","Data":"f47ce1d0611e31834d4b6c641e05a99c1ec346edd68f8619cfac3737ae12575b"} Dec 02 00:24:21 crc kubenswrapper[4856]: I1202 00:24:21.830560 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p" event={"ID":"7896ff15-1225-4a83-898d-3c9166555c2c","Type":"ContainerStarted","Data":"9bf59e320741b5ea52ced0581bf7bdb7b9fcf48f9d7d1d12a3457fb2e5f67d38"} Dec 02 00:24:21 crc kubenswrapper[4856]: I1202 00:24:21.835342 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4" event={"ID":"ef80c4b0-49ad-4ed2-a628-007510f56965","Type":"ContainerStarted","Data":"5b0265cd8329d3b514b58dc2f10a38aae3812428654b0b544178601f115d2ab7"} Dec 02 00:24:21 crc kubenswrapper[4856]: I1202 00:24:21.839326 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw" event={"ID":"f8c47b36-32a8-4485-b337-3b92535e0875","Type":"ContainerStarted","Data":"e6893e200ff8ee90080a989679c583a9548dbd0749e553c8158d6168ba52a205"} Dec 02 00:24:21 crc kubenswrapper[4856]: I1202 00:24:21.841636 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt" event={"ID":"44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01","Type":"ContainerStarted","Data":"717f4872b590d1c6d2cbc7dd0d457fe9413c76f14f0fc92def0007a70abd376b"} Dec 02 00:24:30 crc kubenswrapper[4856]: E1202 00:24:30.774525 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"smoketest-collectd\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/stf-smoketest-smoke1-xbsq7" podUID="fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc" Dec 02 00:24:30 crc kubenswrapper[4856]: I1202 00:24:30.909767 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-xbsq7" event={"ID":"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc","Type":"ContainerStarted","Data":"35b3acc1f987407e13323ce3fd2716662d7610b55da500cd85a970a03d91211a"} Dec 02 00:24:30 crc kubenswrapper[4856]: E1202 00:24:30.911530 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"smoketest-collectd\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/tripleomastercentos9/openstack-collectd:current-tripleo\\\"\"" pod="service-telemetry/stf-smoketest-smoke1-xbsq7" 
podUID="fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc" Dec 02 00:24:33 crc kubenswrapper[4856]: I1202 00:24:33.933909 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-xbsq7" event={"ID":"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc","Type":"ContainerStarted","Data":"61b5b294be69d973f6906614355e12a80eb36f6b46aa754936b10e1c7fa04462"} Dec 02 00:24:33 crc kubenswrapper[4856]: I1202 00:24:33.958884 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/stf-smoketest-smoke1-xbsq7" podStartSLOduration=2.7311905960000002 podStartE2EDuration="28.958865997s" podCreationTimestamp="2025-12-02 00:24:05 +0000 UTC" firstStartedPulling="2025-12-02 00:24:06.509635224 +0000 UTC m=+1073.536003218" lastFinishedPulling="2025-12-02 00:24:32.737310615 +0000 UTC m=+1099.763678619" observedRunningTime="2025-12-02 00:24:33.951211448 +0000 UTC m=+1100.977579462" watchObservedRunningTime="2025-12-02 00:24:33.958865997 +0000 UTC m=+1100.985233991" Dec 02 00:24:43 crc kubenswrapper[4856]: I1202 00:24:43.629326 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-78bcbbdcff-42chm_df74219e-9434-4ae0-a7d9-61bfdad38211/prometheus-webhook-snmp/0.log" Dec 02 00:25:02 crc kubenswrapper[4856]: I1202 00:25:02.198437 4856 generic.go:334] "Generic (PLEG): container finished" podID="fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc" containerID="35b3acc1f987407e13323ce3fd2716662d7610b55da500cd85a970a03d91211a" exitCode=0 Dec 02 00:25:02 crc kubenswrapper[4856]: I1202 00:25:02.198516 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-xbsq7" event={"ID":"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc","Type":"ContainerDied","Data":"35b3acc1f987407e13323ce3fd2716662d7610b55da500cd85a970a03d91211a"} Dec 02 00:25:02 crc kubenswrapper[4856]: I1202 00:25:02.199743 4856 scope.go:117] "RemoveContainer" containerID="35b3acc1f987407e13323ce3fd2716662d7610b55da500cd85a970a03d91211a" Dec 02 00:25:05 crc kubenswrapper[4856]: I1202 00:25:05.062177 4856 patch_prober.go:28] interesting pod/machine-config-daemon-455ww container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 00:25:05 crc kubenswrapper[4856]: I1202 00:25:05.062518 4856 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 00:25:07 crc kubenswrapper[4856]: I1202 00:25:07.241477 4856 generic.go:334] "Generic (PLEG): container finished" podID="fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc" containerID="61b5b294be69d973f6906614355e12a80eb36f6b46aa754936b10e1c7fa04462" exitCode=1 Dec 02 00:25:07 crc kubenswrapper[4856]: I1202 00:25:07.241540 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-xbsq7" event={"ID":"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc","Type":"ContainerDied","Data":"61b5b294be69d973f6906614355e12a80eb36f6b46aa754936b10e1c7fa04462"} Dec 02 00:25:08 crc kubenswrapper[4856]: I1202 00:25:08.569907 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-xbsq7" Dec 02 00:25:08 crc kubenswrapper[4856]: I1202 00:25:08.673124 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-sensubility-config\") pod \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " Dec 02 00:25:08 crc kubenswrapper[4856]: I1202 00:25:08.673187 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-ceilometer-entrypoint-script\") pod \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " Dec 02 00:25:08 crc kubenswrapper[4856]: I1202 00:25:08.673236 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8k59f\" (UniqueName: \"kubernetes.io/projected/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-kube-api-access-8k59f\") pod \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " Dec 02 00:25:08 crc kubenswrapper[4856]: I1202 00:25:08.673282 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-collectd-config\") pod \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " Dec 02 00:25:08 crc kubenswrapper[4856]: I1202 00:25:08.673349 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-ceilometer-publisher\") pod \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " Dec 02 00:25:08 crc kubenswrapper[4856]: I1202 00:25:08.673391 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-healthcheck-log\") pod \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " Dec 02 00:25:08 crc kubenswrapper[4856]: I1202 00:25:08.673460 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-collectd-entrypoint-script\") pod \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\" (UID: \"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc\") " Dec 02 00:25:08 crc kubenswrapper[4856]: I1202 00:25:08.691221 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-kube-api-access-8k59f" (OuterVolumeSpecName: "kube-api-access-8k59f") pod "fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc" (UID: "fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc"). InnerVolumeSpecName "kube-api-access-8k59f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:25:08 crc kubenswrapper[4856]: I1202 00:25:08.695357 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-collectd-entrypoint-script" (OuterVolumeSpecName: "collectd-entrypoint-script") pod "fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc" (UID: "fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc"). InnerVolumeSpecName "collectd-entrypoint-script". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:25:08 crc kubenswrapper[4856]: I1202 00:25:08.699968 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-ceilometer-entrypoint-script" (OuterVolumeSpecName: "ceilometer-entrypoint-script") pod "fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc" (UID: "fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc"). InnerVolumeSpecName "ceilometer-entrypoint-script". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:25:08 crc kubenswrapper[4856]: I1202 00:25:08.701874 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-collectd-config" (OuterVolumeSpecName: "collectd-config") pod "fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc" (UID: "fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc"). InnerVolumeSpecName "collectd-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:25:08 crc kubenswrapper[4856]: I1202 00:25:08.702916 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-sensubility-config" (OuterVolumeSpecName: "sensubility-config") pod "fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc" (UID: "fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc"). InnerVolumeSpecName "sensubility-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:25:08 crc kubenswrapper[4856]: I1202 00:25:08.703938 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-healthcheck-log" (OuterVolumeSpecName: "healthcheck-log") pod "fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc" (UID: "fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc"). InnerVolumeSpecName "healthcheck-log". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:25:08 crc kubenswrapper[4856]: I1202 00:25:08.706124 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-ceilometer-publisher" (OuterVolumeSpecName: "ceilometer-publisher") pod "fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc" (UID: "fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc"). InnerVolumeSpecName "ceilometer-publisher". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:25:08 crc kubenswrapper[4856]: I1202 00:25:08.776934 4856 reconciler_common.go:293] "Volume detached for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-collectd-entrypoint-script\") on node \"crc\" DevicePath \"\"" Dec 02 00:25:08 crc kubenswrapper[4856]: I1202 00:25:08.777150 4856 reconciler_common.go:293] "Volume detached for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-sensubility-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:25:08 crc kubenswrapper[4856]: I1202 00:25:08.777228 4856 reconciler_common.go:293] "Volume detached for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-ceilometer-entrypoint-script\") on node \"crc\" DevicePath \"\"" Dec 02 00:25:08 crc kubenswrapper[4856]: I1202 00:25:08.777294 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8k59f\" (UniqueName: \"kubernetes.io/projected/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-kube-api-access-8k59f\") on node \"crc\" DevicePath \"\"" Dec 02 00:25:08 crc kubenswrapper[4856]: I1202 00:25:08.777350 4856 reconciler_common.go:293] "Volume detached for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-collectd-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:25:08 crc kubenswrapper[4856]: I1202 00:25:08.777403 4856 reconciler_common.go:293] "Volume detached for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-ceilometer-publisher\") on node \"crc\" DevicePath \"\"" Dec 02 00:25:08 crc kubenswrapper[4856]: I1202 00:25:08.777462 4856 reconciler_common.go:293] "Volume detached for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc-healthcheck-log\") on node \"crc\" DevicePath \"\"" Dec 02 00:25:09 crc kubenswrapper[4856]: I1202 00:25:09.265847 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-xbsq7" Dec 02 00:25:09 crc kubenswrapper[4856]: I1202 00:25:09.270037 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-xbsq7" event={"ID":"fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc","Type":"ContainerDied","Data":"45c256854ef8a8184dadfec4d9630f5518925fbe3221d778da3af06ffd727861"} Dec 02 00:25:09 crc kubenswrapper[4856]: I1202 00:25:09.270097 4856 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="45c256854ef8a8184dadfec4d9630f5518925fbe3221d778da3af06ffd727861" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.021876 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/stf-smoketest-smoke1-74ttd"] Dec 02 00:25:16 crc kubenswrapper[4856]: E1202 00:25:16.022554 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc" containerName="smoketest-ceilometer" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.022566 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc" containerName="smoketest-ceilometer" Dec 02 00:25:16 crc kubenswrapper[4856]: E1202 00:25:16.022577 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc" containerName="smoketest-collectd" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.022583 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc" containerName="smoketest-collectd" Dec 02 00:25:16 crc kubenswrapper[4856]: E1202 00:25:16.022611 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8e0ec98-e166-42a0-8768-f99a31f176f1" containerName="curl" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.022617 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8e0ec98-e166-42a0-8768-f99a31f176f1" containerName="curl" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.022739 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc" containerName="smoketest-ceilometer" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.022752 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8e0ec98-e166-42a0-8768-f99a31f176f1" containerName="curl" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.022758 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc1a5ec0-f15f-4dc5-b9f8-970a43d689cc" containerName="smoketest-collectd" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.023342 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-74ttd" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.026045 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-entrypoint-script" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.026095 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-config" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.026411 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-publisher" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.027389 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-entrypoint-script" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.027546 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-sensubility-config" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.028336 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-healthcheck-log" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.043652 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-74ttd"] Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.082411 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-74ttd\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " pod="service-telemetry/stf-smoketest-smoke1-74ttd" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.082470 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-collectd-config\") pod \"stf-smoketest-smoke1-74ttd\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " pod="service-telemetry/stf-smoketest-smoke1-74ttd" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.082492 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-sensubility-config\") pod \"stf-smoketest-smoke1-74ttd\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " pod="service-telemetry/stf-smoketest-smoke1-74ttd" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.082599 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-74ttd\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " pod="service-telemetry/stf-smoketest-smoke1-74ttd" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.082629 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcvfx\" (UniqueName: \"kubernetes.io/projected/6143e48f-a582-4f6d-b436-7754e8853ef2-kube-api-access-fcvfx\") pod \"stf-smoketest-smoke1-74ttd\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " pod="service-telemetry/stf-smoketest-smoke1-74ttd" Dec 02 00:25:16 crc 
kubenswrapper[4856]: I1202 00:25:16.082665 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-healthcheck-log\") pod \"stf-smoketest-smoke1-74ttd\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " pod="service-telemetry/stf-smoketest-smoke1-74ttd" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.082686 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-ceilometer-publisher\") pod \"stf-smoketest-smoke1-74ttd\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " pod="service-telemetry/stf-smoketest-smoke1-74ttd" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.184022 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-collectd-config\") pod \"stf-smoketest-smoke1-74ttd\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " pod="service-telemetry/stf-smoketest-smoke1-74ttd" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.184333 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-sensubility-config\") pod \"stf-smoketest-smoke1-74ttd\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " pod="service-telemetry/stf-smoketest-smoke1-74ttd" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.184455 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-74ttd\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " pod="service-telemetry/stf-smoketest-smoke1-74ttd" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.184556 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcvfx\" (UniqueName: \"kubernetes.io/projected/6143e48f-a582-4f6d-b436-7754e8853ef2-kube-api-access-fcvfx\") pod \"stf-smoketest-smoke1-74ttd\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " pod="service-telemetry/stf-smoketest-smoke1-74ttd" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.184682 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-healthcheck-log\") pod \"stf-smoketest-smoke1-74ttd\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " pod="service-telemetry/stf-smoketest-smoke1-74ttd" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.184793 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-ceilometer-publisher\") pod \"stf-smoketest-smoke1-74ttd\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " pod="service-telemetry/stf-smoketest-smoke1-74ttd" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.184998 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-collectd-entrypoint-script\") pod 
\"stf-smoketest-smoke1-74ttd\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " pod="service-telemetry/stf-smoketest-smoke1-74ttd" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.185386 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-74ttd\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " pod="service-telemetry/stf-smoketest-smoke1-74ttd" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.185431 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-collectd-config\") pod \"stf-smoketest-smoke1-74ttd\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " pod="service-telemetry/stf-smoketest-smoke1-74ttd" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.185551 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-healthcheck-log\") pod \"stf-smoketest-smoke1-74ttd\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " pod="service-telemetry/stf-smoketest-smoke1-74ttd" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.185572 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-sensubility-config\") pod \"stf-smoketest-smoke1-74ttd\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " pod="service-telemetry/stf-smoketest-smoke1-74ttd" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.185722 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-ceilometer-publisher\") pod \"stf-smoketest-smoke1-74ttd\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " pod="service-telemetry/stf-smoketest-smoke1-74ttd" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.185827 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-74ttd\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " pod="service-telemetry/stf-smoketest-smoke1-74ttd" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.207702 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcvfx\" (UniqueName: \"kubernetes.io/projected/6143e48f-a582-4f6d-b436-7754e8853ef2-kube-api-access-fcvfx\") pod \"stf-smoketest-smoke1-74ttd\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " pod="service-telemetry/stf-smoketest-smoke1-74ttd" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.341835 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-74ttd" Dec 02 00:25:16 crc kubenswrapper[4856]: I1202 00:25:16.552108 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-74ttd"] Dec 02 00:25:17 crc kubenswrapper[4856]: I1202 00:25:17.341795 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-74ttd" event={"ID":"6143e48f-a582-4f6d-b436-7754e8853ef2","Type":"ContainerStarted","Data":"3254c3a5c3704911d4e68b45e797504ca8dcd4792f8e7066c87d70402c565c77"} Dec 02 00:25:17 crc kubenswrapper[4856]: I1202 00:25:17.342105 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-74ttd" event={"ID":"6143e48f-a582-4f6d-b436-7754e8853ef2","Type":"ContainerStarted","Data":"3640bac8bee5ddda435a64a432761858c97373222bb4a6aa354d4b1ff8ea4d0c"} Dec 02 00:25:17 crc kubenswrapper[4856]: I1202 00:25:17.342115 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-74ttd" event={"ID":"6143e48f-a582-4f6d-b436-7754e8853ef2","Type":"ContainerStarted","Data":"5962f5fe01ab399af4747b742dc6327e31a925ac07477cc19e0c62f159ec4ff4"} Dec 02 00:25:17 crc kubenswrapper[4856]: I1202 00:25:17.369053 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/stf-smoketest-smoke1-74ttd" podStartSLOduration=1.369033341 podStartE2EDuration="1.369033341s" podCreationTimestamp="2025-12-02 00:25:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 00:25:17.362951691 +0000 UTC m=+1144.389319715" watchObservedRunningTime="2025-12-02 00:25:17.369033341 +0000 UTC m=+1144.395401355" Dec 02 00:25:35 crc kubenswrapper[4856]: I1202 00:25:35.061802 4856 patch_prober.go:28] interesting pod/machine-config-daemon-455ww container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 00:25:35 crc kubenswrapper[4856]: I1202 00:25:35.062436 4856 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 00:25:48 crc kubenswrapper[4856]: I1202 00:25:48.576664 4856 generic.go:334] "Generic (PLEG): container finished" podID="6143e48f-a582-4f6d-b436-7754e8853ef2" containerID="3254c3a5c3704911d4e68b45e797504ca8dcd4792f8e7066c87d70402c565c77" exitCode=0 Dec 02 00:25:48 crc kubenswrapper[4856]: I1202 00:25:48.577155 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-74ttd" event={"ID":"6143e48f-a582-4f6d-b436-7754e8853ef2","Type":"ContainerDied","Data":"3254c3a5c3704911d4e68b45e797504ca8dcd4792f8e7066c87d70402c565c77"} Dec 02 00:25:48 crc kubenswrapper[4856]: I1202 00:25:48.577675 4856 scope.go:117] "RemoveContainer" containerID="3254c3a5c3704911d4e68b45e797504ca8dcd4792f8e7066c87d70402c565c77" Dec 02 00:25:50 crc kubenswrapper[4856]: I1202 00:25:50.593555 4856 generic.go:334] "Generic (PLEG): container finished" podID="6143e48f-a582-4f6d-b436-7754e8853ef2" containerID="3640bac8bee5ddda435a64a432761858c97373222bb4a6aa354d4b1ff8ea4d0c" exitCode=0 Dec 02 
00:25:50 crc kubenswrapper[4856]: I1202 00:25:50.593643 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-74ttd" event={"ID":"6143e48f-a582-4f6d-b436-7754e8853ef2","Type":"ContainerDied","Data":"3640bac8bee5ddda435a64a432761858c97373222bb4a6aa354d4b1ff8ea4d0c"} Dec 02 00:25:51 crc kubenswrapper[4856]: I1202 00:25:51.833373 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-74ttd" Dec 02 00:25:51 crc kubenswrapper[4856]: I1202 00:25:51.904391 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-sensubility-config\") pod \"6143e48f-a582-4f6d-b436-7754e8853ef2\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " Dec 02 00:25:51 crc kubenswrapper[4856]: I1202 00:25:51.904506 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-healthcheck-log\") pod \"6143e48f-a582-4f6d-b436-7754e8853ef2\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " Dec 02 00:25:51 crc kubenswrapper[4856]: I1202 00:25:51.904606 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcvfx\" (UniqueName: \"kubernetes.io/projected/6143e48f-a582-4f6d-b436-7754e8853ef2-kube-api-access-fcvfx\") pod \"6143e48f-a582-4f6d-b436-7754e8853ef2\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " Dec 02 00:25:51 crc kubenswrapper[4856]: I1202 00:25:51.904632 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-collectd-config\") pod \"6143e48f-a582-4f6d-b436-7754e8853ef2\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " Dec 02 00:25:51 crc kubenswrapper[4856]: I1202 00:25:51.904657 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-ceilometer-publisher\") pod \"6143e48f-a582-4f6d-b436-7754e8853ef2\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " Dec 02 00:25:51 crc kubenswrapper[4856]: I1202 00:25:51.904683 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-collectd-entrypoint-script\") pod \"6143e48f-a582-4f6d-b436-7754e8853ef2\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " Dec 02 00:25:51 crc kubenswrapper[4856]: I1202 00:25:51.904727 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-ceilometer-entrypoint-script\") pod \"6143e48f-a582-4f6d-b436-7754e8853ef2\" (UID: \"6143e48f-a582-4f6d-b436-7754e8853ef2\") " Dec 02 00:25:51 crc kubenswrapper[4856]: I1202 00:25:51.909858 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6143e48f-a582-4f6d-b436-7754e8853ef2-kube-api-access-fcvfx" (OuterVolumeSpecName: "kube-api-access-fcvfx") pod "6143e48f-a582-4f6d-b436-7754e8853ef2" (UID: "6143e48f-a582-4f6d-b436-7754e8853ef2"). InnerVolumeSpecName "kube-api-access-fcvfx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:25:51 crc kubenswrapper[4856]: I1202 00:25:51.925334 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-sensubility-config" (OuterVolumeSpecName: "sensubility-config") pod "6143e48f-a582-4f6d-b436-7754e8853ef2" (UID: "6143e48f-a582-4f6d-b436-7754e8853ef2"). InnerVolumeSpecName "sensubility-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:25:51 crc kubenswrapper[4856]: I1202 00:25:51.925382 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-collectd-config" (OuterVolumeSpecName: "collectd-config") pod "6143e48f-a582-4f6d-b436-7754e8853ef2" (UID: "6143e48f-a582-4f6d-b436-7754e8853ef2"). InnerVolumeSpecName "collectd-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:25:51 crc kubenswrapper[4856]: I1202 00:25:51.925397 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-ceilometer-entrypoint-script" (OuterVolumeSpecName: "ceilometer-entrypoint-script") pod "6143e48f-a582-4f6d-b436-7754e8853ef2" (UID: "6143e48f-a582-4f6d-b436-7754e8853ef2"). InnerVolumeSpecName "ceilometer-entrypoint-script". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:25:51 crc kubenswrapper[4856]: I1202 00:25:51.926563 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-collectd-entrypoint-script" (OuterVolumeSpecName: "collectd-entrypoint-script") pod "6143e48f-a582-4f6d-b436-7754e8853ef2" (UID: "6143e48f-a582-4f6d-b436-7754e8853ef2"). InnerVolumeSpecName "collectd-entrypoint-script". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:25:51 crc kubenswrapper[4856]: I1202 00:25:51.926990 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-healthcheck-log" (OuterVolumeSpecName: "healthcheck-log") pod "6143e48f-a582-4f6d-b436-7754e8853ef2" (UID: "6143e48f-a582-4f6d-b436-7754e8853ef2"). InnerVolumeSpecName "healthcheck-log". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:25:51 crc kubenswrapper[4856]: I1202 00:25:51.932732 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-ceilometer-publisher" (OuterVolumeSpecName: "ceilometer-publisher") pod "6143e48f-a582-4f6d-b436-7754e8853ef2" (UID: "6143e48f-a582-4f6d-b436-7754e8853ef2"). InnerVolumeSpecName "ceilometer-publisher". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:25:52 crc kubenswrapper[4856]: I1202 00:25:52.007811 4856 reconciler_common.go:293] "Volume detached for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-healthcheck-log\") on node \"crc\" DevicePath \"\"" Dec 02 00:25:52 crc kubenswrapper[4856]: I1202 00:25:52.007855 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcvfx\" (UniqueName: \"kubernetes.io/projected/6143e48f-a582-4f6d-b436-7754e8853ef2-kube-api-access-fcvfx\") on node \"crc\" DevicePath \"\"" Dec 02 00:25:52 crc kubenswrapper[4856]: I1202 00:25:52.007867 4856 reconciler_common.go:293] "Volume detached for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-collectd-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:25:52 crc kubenswrapper[4856]: I1202 00:25:52.007876 4856 reconciler_common.go:293] "Volume detached for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-ceilometer-publisher\") on node \"crc\" DevicePath \"\"" Dec 02 00:25:52 crc kubenswrapper[4856]: I1202 00:25:52.007887 4856 reconciler_common.go:293] "Volume detached for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-collectd-entrypoint-script\") on node \"crc\" DevicePath \"\"" Dec 02 00:25:52 crc kubenswrapper[4856]: I1202 00:25:52.007897 4856 reconciler_common.go:293] "Volume detached for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-ceilometer-entrypoint-script\") on node \"crc\" DevicePath \"\"" Dec 02 00:25:52 crc kubenswrapper[4856]: I1202 00:25:52.007907 4856 reconciler_common.go:293] "Volume detached for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/6143e48f-a582-4f6d-b436-7754e8853ef2-sensubility-config\") on node \"crc\" DevicePath \"\"" Dec 02 00:25:52 crc kubenswrapper[4856]: I1202 00:25:52.608273 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-74ttd" event={"ID":"6143e48f-a582-4f6d-b436-7754e8853ef2","Type":"ContainerDied","Data":"5962f5fe01ab399af4747b742dc6327e31a925ac07477cc19e0c62f159ec4ff4"} Dec 02 00:25:52 crc kubenswrapper[4856]: I1202 00:25:52.608313 4856 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5962f5fe01ab399af4747b742dc6327e31a925ac07477cc19e0c62f159ec4ff4" Dec 02 00:25:52 crc kubenswrapper[4856]: I1202 00:25:52.608338 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-74ttd" Dec 02 00:25:53 crc kubenswrapper[4856]: I1202 00:25:53.951686 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_stf-smoketest-smoke1-74ttd_6143e48f-a582-4f6d-b436-7754e8853ef2/smoketest-collectd/0.log" Dec 02 00:25:54 crc kubenswrapper[4856]: I1202 00:25:54.278144 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_stf-smoketest-smoke1-74ttd_6143e48f-a582-4f6d-b436-7754e8853ef2/smoketest-ceilometer/0.log" Dec 02 00:25:54 crc kubenswrapper[4856]: I1202 00:25:54.544196 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-interconnect-68864d46cb-lfdgc_2d08f322-2d31-4b5b-a42a-edcdb7a4da4d/default-interconnect/0.log" Dec 02 00:25:54 crc kubenswrapper[4856]: I1202 00:25:54.837541 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw_f8c47b36-32a8-4485-b337-3b92535e0875/bridge/2.log" Dec 02 00:25:55 crc kubenswrapper[4856]: I1202 00:25:55.138093 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-meter-smartgateway-7996dc9458-bslvw_f8c47b36-32a8-4485-b337-3b92535e0875/sg-core/0.log" Dec 02 00:25:55 crc kubenswrapper[4856]: I1202 00:25:55.437662 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt_44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01/bridge/2.log" Dec 02 00:25:55 crc kubenswrapper[4856]: I1202 00:25:55.714088 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-event-smartgateway-75567bcfdc-x5jvt_44bc0c72-f1b6-4c6b-be2d-fecd0dcb5c01/sg-core/0.log" Dec 02 00:25:55 crc kubenswrapper[4856]: I1202 00:25:55.997057 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg_16a2092a-9a92-431a-9cb4-2c760fb5b5f6/bridge/2.log" Dec 02 00:25:56 crc kubenswrapper[4856]: I1202 00:25:56.238238 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-meter-smartgateway-b57f974ff-hw5qg_16a2092a-9a92-431a-9cb4-2c760fb5b5f6/sg-core/0.log" Dec 02 00:25:56 crc kubenswrapper[4856]: I1202 00:25:56.530768 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4_ef80c4b0-49ad-4ed2-a628-007510f56965/bridge/2.log" Dec 02 00:25:56 crc kubenswrapper[4856]: I1202 00:25:56.829438 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-event-smartgateway-6d4bdc689f-vzbb4_ef80c4b0-49ad-4ed2-a628-007510f56965/sg-core/0.log" Dec 02 00:25:57 crc kubenswrapper[4856]: I1202 00:25:57.079664 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p_7896ff15-1225-4a83-898d-3c9166555c2c/bridge/2.log" Dec 02 00:25:57 crc kubenswrapper[4856]: I1202 00:25:57.357029 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-sens-meter-smartgateway-6864f4fb65-rbs6p_7896ff15-1225-4a83-898d-3c9166555c2c/sg-core/0.log" Dec 02 00:26:00 crc kubenswrapper[4856]: I1202 00:26:00.799800 4856 log.go:25] "Finished parsing log file" 
path="/var/log/pods/service-telemetry_smart-gateway-operator-6d757dbf4c-c925h_0ad34512-956b-4bf0-a54c-5d44f734a857/operator/0.log" Dec 02 00:26:01 crc kubenswrapper[4856]: I1202 00:26:01.117414 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-default-0_dc28fa42-75c6-4d34-8d03-6e759368f5e5/prometheus/0.log" Dec 02 00:26:01 crc kubenswrapper[4856]: I1202 00:26:01.398172 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_elasticsearch-es-default-0_97ecd658-bf64-4607-9f66-6b976ea97c3c/elasticsearch/0.log" Dec 02 00:26:01 crc kubenswrapper[4856]: I1202 00:26:01.712279 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-78bcbbdcff-42chm_df74219e-9434-4ae0-a7d9-61bfdad38211/prometheus-webhook-snmp/0.log" Dec 02 00:26:01 crc kubenswrapper[4856]: I1202 00:26:01.992725 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_alertmanager-default-0_9c32b620-4831-417b-b2bb-5bc26dc65c4d/alertmanager/0.log" Dec 02 00:26:05 crc kubenswrapper[4856]: I1202 00:26:05.061801 4856 patch_prober.go:28] interesting pod/machine-config-daemon-455ww container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 00:26:05 crc kubenswrapper[4856]: I1202 00:26:05.063301 4856 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 00:26:05 crc kubenswrapper[4856]: I1202 00:26:05.063399 4856 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-455ww" Dec 02 00:26:05 crc kubenswrapper[4856]: I1202 00:26:05.064315 4856 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"29e2f8d4696162c413a025531de65a7fa0caff678050b80073dc7b7e99207924"} pod="openshift-machine-config-operator/machine-config-daemon-455ww" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 00:26:05 crc kubenswrapper[4856]: I1202 00:26:05.064391 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" containerID="cri-o://29e2f8d4696162c413a025531de65a7fa0caff678050b80073dc7b7e99207924" gracePeriod=600 Dec 02 00:26:05 crc kubenswrapper[4856]: I1202 00:26:05.698740 4856 generic.go:334] "Generic (PLEG): container finished" podID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerID="29e2f8d4696162c413a025531de65a7fa0caff678050b80073dc7b7e99207924" exitCode=0 Dec 02 00:26:05 crc kubenswrapper[4856]: I1202 00:26:05.698821 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" event={"ID":"0271f00d-b420-4dee-aa8b-92d6fc294b2a","Type":"ContainerDied","Data":"29e2f8d4696162c413a025531de65a7fa0caff678050b80073dc7b7e99207924"} Dec 02 00:26:05 crc kubenswrapper[4856]: I1202 00:26:05.699092 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-455ww" event={"ID":"0271f00d-b420-4dee-aa8b-92d6fc294b2a","Type":"ContainerStarted","Data":"fb7ff0312f1383e8fbf6b0241feb022dd38d0b629331ff47c6869482a6ef16cb"} Dec 02 00:26:05 crc kubenswrapper[4856]: I1202 00:26:05.699112 4856 scope.go:117] "RemoveContainer" containerID="e7e6284ee922e4a2a16a6b87da7e0c59263014f87eb5443234c0a212cc45aca5" Dec 02 00:26:12 crc kubenswrapper[4856]: I1202 00:26:12.242648 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/infrawatch-operators-w96wt"] Dec 02 00:26:12 crc kubenswrapper[4856]: E1202 00:26:12.243996 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6143e48f-a582-4f6d-b436-7754e8853ef2" containerName="smoketest-collectd" Dec 02 00:26:12 crc kubenswrapper[4856]: I1202 00:26:12.244025 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="6143e48f-a582-4f6d-b436-7754e8853ef2" containerName="smoketest-collectd" Dec 02 00:26:12 crc kubenswrapper[4856]: E1202 00:26:12.244074 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6143e48f-a582-4f6d-b436-7754e8853ef2" containerName="smoketest-ceilometer" Dec 02 00:26:12 crc kubenswrapper[4856]: I1202 00:26:12.244087 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="6143e48f-a582-4f6d-b436-7754e8853ef2" containerName="smoketest-ceilometer" Dec 02 00:26:12 crc kubenswrapper[4856]: I1202 00:26:12.244383 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="6143e48f-a582-4f6d-b436-7754e8853ef2" containerName="smoketest-collectd" Dec 02 00:26:12 crc kubenswrapper[4856]: I1202 00:26:12.244414 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="6143e48f-a582-4f6d-b436-7754e8853ef2" containerName="smoketest-ceilometer" Dec 02 00:26:12 crc kubenswrapper[4856]: I1202 00:26:12.245200 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-w96wt"] Dec 02 00:26:12 crc kubenswrapper[4856]: I1202 00:26:12.245518 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-w96wt" Dec 02 00:26:12 crc kubenswrapper[4856]: I1202 00:26:12.400917 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7c9k8\" (UniqueName: \"kubernetes.io/projected/ac725bfc-dc79-4dbe-9c59-d870e6d22c6e-kube-api-access-7c9k8\") pod \"infrawatch-operators-w96wt\" (UID: \"ac725bfc-dc79-4dbe-9c59-d870e6d22c6e\") " pod="service-telemetry/infrawatch-operators-w96wt" Dec 02 00:26:12 crc kubenswrapper[4856]: I1202 00:26:12.502990 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7c9k8\" (UniqueName: \"kubernetes.io/projected/ac725bfc-dc79-4dbe-9c59-d870e6d22c6e-kube-api-access-7c9k8\") pod \"infrawatch-operators-w96wt\" (UID: \"ac725bfc-dc79-4dbe-9c59-d870e6d22c6e\") " pod="service-telemetry/infrawatch-operators-w96wt" Dec 02 00:26:12 crc kubenswrapper[4856]: I1202 00:26:12.528884 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7c9k8\" (UniqueName: \"kubernetes.io/projected/ac725bfc-dc79-4dbe-9c59-d870e6d22c6e-kube-api-access-7c9k8\") pod \"infrawatch-operators-w96wt\" (UID: \"ac725bfc-dc79-4dbe-9c59-d870e6d22c6e\") " pod="service-telemetry/infrawatch-operators-w96wt" Dec 02 00:26:12 crc kubenswrapper[4856]: I1202 00:26:12.581302 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/infrawatch-operators-w96wt" Dec 02 00:26:12 crc kubenswrapper[4856]: I1202 00:26:12.837186 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-w96wt"] Dec 02 00:26:13 crc kubenswrapper[4856]: I1202 00:26:13.767752 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-w96wt" event={"ID":"ac725bfc-dc79-4dbe-9c59-d870e6d22c6e","Type":"ContainerStarted","Data":"a7e77d88dd18f217326f686f485aabc01878a41a060985db1eb7e48de74b88e5"} Dec 02 00:26:13 crc kubenswrapper[4856]: I1202 00:26:13.768145 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-w96wt" event={"ID":"ac725bfc-dc79-4dbe-9c59-d870e6d22c6e","Type":"ContainerStarted","Data":"3c96d26816a9d6b59a11dc213628ba5d8ec5dd33ace47c3a80a5a5482ef6f456"} Dec 02 00:26:13 crc kubenswrapper[4856]: I1202 00:26:13.789945 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/infrawatch-operators-w96wt" podStartSLOduration=1.653290165 podStartE2EDuration="1.789921209s" podCreationTimestamp="2025-12-02 00:26:12 +0000 UTC" firstStartedPulling="2025-12-02 00:26:12.844083186 +0000 UTC m=+1199.870451190" lastFinishedPulling="2025-12-02 00:26:12.98071422 +0000 UTC m=+1200.007082234" observedRunningTime="2025-12-02 00:26:13.781940133 +0000 UTC m=+1200.808308177" watchObservedRunningTime="2025-12-02 00:26:13.789921209 +0000 UTC m=+1200.816289223" Dec 02 00:26:16 crc kubenswrapper[4856]: I1202 00:26:16.088836 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-dd9844f47-gmq8n_37afc025-524b-44e8-a79b-1390846f28bb/operator/0.log" Dec 02 00:26:19 crc kubenswrapper[4856]: I1202 00:26:19.292903 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-6d757dbf4c-c925h_0ad34512-956b-4bf0-a54c-5d44f734a857/operator/0.log" Dec 02 00:26:19 crc kubenswrapper[4856]: I1202 00:26:19.533044 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_qdr-test_0f7dbe4c-13b8-45ac-987a-1125b1a93c6e/qdr/0.log" Dec 02 00:26:22 crc kubenswrapper[4856]: I1202 00:26:22.582077 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/infrawatch-operators-w96wt" Dec 02 00:26:22 crc kubenswrapper[4856]: I1202 00:26:22.582356 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/infrawatch-operators-w96wt" Dec 02 00:26:22 crc kubenswrapper[4856]: I1202 00:26:22.628386 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/infrawatch-operators-w96wt" Dec 02 00:26:22 crc kubenswrapper[4856]: I1202 00:26:22.861716 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/infrawatch-operators-w96wt" Dec 02 00:26:26 crc kubenswrapper[4856]: I1202 00:26:26.195791 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-w96wt"] Dec 02 00:26:26 crc kubenswrapper[4856]: I1202 00:26:26.196433 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/infrawatch-operators-w96wt" podUID="ac725bfc-dc79-4dbe-9c59-d870e6d22c6e" containerName="registry-server" containerID="cri-o://a7e77d88dd18f217326f686f485aabc01878a41a060985db1eb7e48de74b88e5" gracePeriod=2 Dec 02 00:26:26 crc kubenswrapper[4856]: I1202 00:26:26.555914 4856 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-w96wt" Dec 02 00:26:26 crc kubenswrapper[4856]: I1202 00:26:26.698811 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c9k8\" (UniqueName: \"kubernetes.io/projected/ac725bfc-dc79-4dbe-9c59-d870e6d22c6e-kube-api-access-7c9k8\") pod \"ac725bfc-dc79-4dbe-9c59-d870e6d22c6e\" (UID: \"ac725bfc-dc79-4dbe-9c59-d870e6d22c6e\") " Dec 02 00:26:26 crc kubenswrapper[4856]: I1202 00:26:26.704120 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac725bfc-dc79-4dbe-9c59-d870e6d22c6e-kube-api-access-7c9k8" (OuterVolumeSpecName: "kube-api-access-7c9k8") pod "ac725bfc-dc79-4dbe-9c59-d870e6d22c6e" (UID: "ac725bfc-dc79-4dbe-9c59-d870e6d22c6e"). InnerVolumeSpecName "kube-api-access-7c9k8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:26:26 crc kubenswrapper[4856]: I1202 00:26:26.805006 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c9k8\" (UniqueName: \"kubernetes.io/projected/ac725bfc-dc79-4dbe-9c59-d870e6d22c6e-kube-api-access-7c9k8\") on node \"crc\" DevicePath \"\"" Dec 02 00:26:26 crc kubenswrapper[4856]: I1202 00:26:26.863128 4856 generic.go:334] "Generic (PLEG): container finished" podID="ac725bfc-dc79-4dbe-9c59-d870e6d22c6e" containerID="a7e77d88dd18f217326f686f485aabc01878a41a060985db1eb7e48de74b88e5" exitCode=0 Dec 02 00:26:26 crc kubenswrapper[4856]: I1202 00:26:26.863177 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-w96wt" Dec 02 00:26:26 crc kubenswrapper[4856]: I1202 00:26:26.863180 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-w96wt" event={"ID":"ac725bfc-dc79-4dbe-9c59-d870e6d22c6e","Type":"ContainerDied","Data":"a7e77d88dd18f217326f686f485aabc01878a41a060985db1eb7e48de74b88e5"} Dec 02 00:26:26 crc kubenswrapper[4856]: I1202 00:26:26.863509 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-w96wt" event={"ID":"ac725bfc-dc79-4dbe-9c59-d870e6d22c6e","Type":"ContainerDied","Data":"3c96d26816a9d6b59a11dc213628ba5d8ec5dd33ace47c3a80a5a5482ef6f456"} Dec 02 00:26:26 crc kubenswrapper[4856]: I1202 00:26:26.863550 4856 scope.go:117] "RemoveContainer" containerID="a7e77d88dd18f217326f686f485aabc01878a41a060985db1eb7e48de74b88e5" Dec 02 00:26:26 crc kubenswrapper[4856]: I1202 00:26:26.879257 4856 scope.go:117] "RemoveContainer" containerID="a7e77d88dd18f217326f686f485aabc01878a41a060985db1eb7e48de74b88e5" Dec 02 00:26:26 crc kubenswrapper[4856]: E1202 00:26:26.879720 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7e77d88dd18f217326f686f485aabc01878a41a060985db1eb7e48de74b88e5\": container with ID starting with a7e77d88dd18f217326f686f485aabc01878a41a060985db1eb7e48de74b88e5 not found: ID does not exist" containerID="a7e77d88dd18f217326f686f485aabc01878a41a060985db1eb7e48de74b88e5" Dec 02 00:26:26 crc kubenswrapper[4856]: I1202 00:26:26.879841 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7e77d88dd18f217326f686f485aabc01878a41a060985db1eb7e48de74b88e5"} err="failed to get container status \"a7e77d88dd18f217326f686f485aabc01878a41a060985db1eb7e48de74b88e5\": rpc error: code = NotFound desc = could not find container 
\"a7e77d88dd18f217326f686f485aabc01878a41a060985db1eb7e48de74b88e5\": container with ID starting with a7e77d88dd18f217326f686f485aabc01878a41a060985db1eb7e48de74b88e5 not found: ID does not exist" Dec 02 00:26:26 crc kubenswrapper[4856]: I1202 00:26:26.887643 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-w96wt"] Dec 02 00:26:26 crc kubenswrapper[4856]: I1202 00:26:26.892338 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/infrawatch-operators-w96wt"] Dec 02 00:26:27 crc kubenswrapper[4856]: I1202 00:26:27.263339 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac725bfc-dc79-4dbe-9c59-d870e6d22c6e" path="/var/lib/kubelet/pods/ac725bfc-dc79-4dbe-9c59-d870e6d22c6e/volumes" Dec 02 00:26:55 crc kubenswrapper[4856]: I1202 00:26:55.522289 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-pdhrp/must-gather-cq929"] Dec 02 00:26:55 crc kubenswrapper[4856]: E1202 00:26:55.523179 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac725bfc-dc79-4dbe-9c59-d870e6d22c6e" containerName="registry-server" Dec 02 00:26:55 crc kubenswrapper[4856]: I1202 00:26:55.523197 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac725bfc-dc79-4dbe-9c59-d870e6d22c6e" containerName="registry-server" Dec 02 00:26:55 crc kubenswrapper[4856]: I1202 00:26:55.523349 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac725bfc-dc79-4dbe-9c59-d870e6d22c6e" containerName="registry-server" Dec 02 00:26:55 crc kubenswrapper[4856]: I1202 00:26:55.524197 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pdhrp/must-gather-cq929" Dec 02 00:26:55 crc kubenswrapper[4856]: I1202 00:26:55.529445 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-pdhrp"/"default-dockercfg-qkj66" Dec 02 00:26:55 crc kubenswrapper[4856]: I1202 00:26:55.529682 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-pdhrp"/"kube-root-ca.crt" Dec 02 00:26:55 crc kubenswrapper[4856]: I1202 00:26:55.530388 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-pdhrp"/"openshift-service-ca.crt" Dec 02 00:26:55 crc kubenswrapper[4856]: I1202 00:26:55.570811 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d9038c84-339d-41f3-94d9-8a2dc1d0cc66-must-gather-output\") pod \"must-gather-cq929\" (UID: \"d9038c84-339d-41f3-94d9-8a2dc1d0cc66\") " pod="openshift-must-gather-pdhrp/must-gather-cq929" Dec 02 00:26:55 crc kubenswrapper[4856]: I1202 00:26:55.570947 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8s9t2\" (UniqueName: \"kubernetes.io/projected/d9038c84-339d-41f3-94d9-8a2dc1d0cc66-kube-api-access-8s9t2\") pod \"must-gather-cq929\" (UID: \"d9038c84-339d-41f3-94d9-8a2dc1d0cc66\") " pod="openshift-must-gather-pdhrp/must-gather-cq929" Dec 02 00:26:55 crc kubenswrapper[4856]: I1202 00:26:55.596384 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-pdhrp/must-gather-cq929"] Dec 02 00:26:55 crc kubenswrapper[4856]: I1202 00:26:55.672334 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: 
\"kubernetes.io/empty-dir/d9038c84-339d-41f3-94d9-8a2dc1d0cc66-must-gather-output\") pod \"must-gather-cq929\" (UID: \"d9038c84-339d-41f3-94d9-8a2dc1d0cc66\") " pod="openshift-must-gather-pdhrp/must-gather-cq929" Dec 02 00:26:55 crc kubenswrapper[4856]: I1202 00:26:55.672483 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8s9t2\" (UniqueName: \"kubernetes.io/projected/d9038c84-339d-41f3-94d9-8a2dc1d0cc66-kube-api-access-8s9t2\") pod \"must-gather-cq929\" (UID: \"d9038c84-339d-41f3-94d9-8a2dc1d0cc66\") " pod="openshift-must-gather-pdhrp/must-gather-cq929" Dec 02 00:26:55 crc kubenswrapper[4856]: I1202 00:26:55.672887 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d9038c84-339d-41f3-94d9-8a2dc1d0cc66-must-gather-output\") pod \"must-gather-cq929\" (UID: \"d9038c84-339d-41f3-94d9-8a2dc1d0cc66\") " pod="openshift-must-gather-pdhrp/must-gather-cq929" Dec 02 00:26:55 crc kubenswrapper[4856]: I1202 00:26:55.689716 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8s9t2\" (UniqueName: \"kubernetes.io/projected/d9038c84-339d-41f3-94d9-8a2dc1d0cc66-kube-api-access-8s9t2\") pod \"must-gather-cq929\" (UID: \"d9038c84-339d-41f3-94d9-8a2dc1d0cc66\") " pod="openshift-must-gather-pdhrp/must-gather-cq929" Dec 02 00:26:55 crc kubenswrapper[4856]: I1202 00:26:55.844900 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pdhrp/must-gather-cq929" Dec 02 00:26:56 crc kubenswrapper[4856]: I1202 00:26:56.312450 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-pdhrp/must-gather-cq929"] Dec 02 00:26:57 crc kubenswrapper[4856]: I1202 00:26:57.132466 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pdhrp/must-gather-cq929" event={"ID":"d9038c84-339d-41f3-94d9-8a2dc1d0cc66","Type":"ContainerStarted","Data":"a79f42c2608fc0aafacd2a75dc5aa14742109ae4220950f32b5d63e88680608a"} Dec 02 00:27:09 crc kubenswrapper[4856]: I1202 00:27:09.233734 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pdhrp/must-gather-cq929" event={"ID":"d9038c84-339d-41f3-94d9-8a2dc1d0cc66","Type":"ContainerStarted","Data":"9af996e2e901c48220f23ba8b5972342e64f7865d8f1f7dae89a82b9a7bdf339"} Dec 02 00:27:10 crc kubenswrapper[4856]: I1202 00:27:10.240441 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pdhrp/must-gather-cq929" event={"ID":"d9038c84-339d-41f3-94d9-8a2dc1d0cc66","Type":"ContainerStarted","Data":"2801867f31a33b2143c3f6318d9995950e21d9b115e4d2f2d11cd383a0fe06d9"} Dec 02 00:27:10 crc kubenswrapper[4856]: I1202 00:27:10.252876 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-pdhrp/must-gather-cq929" podStartSLOduration=2.556024082 podStartE2EDuration="15.252861837s" podCreationTimestamp="2025-12-02 00:26:55 +0000 UTC" firstStartedPulling="2025-12-02 00:26:56.325353447 +0000 UTC m=+1243.351721441" lastFinishedPulling="2025-12-02 00:27:09.022191152 +0000 UTC m=+1256.048559196" observedRunningTime="2025-12-02 00:27:10.251842032 +0000 UTC m=+1257.278210036" watchObservedRunningTime="2025-12-02 00:27:10.252861837 +0000 UTC m=+1257.279229841" Dec 02 00:27:46 crc kubenswrapper[4856]: I1202 00:27:46.125958 4856 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-q58fj_5c276957-366f-454e-bd11-de451d27e0be/control-plane-machine-set-operator/0.log" Dec 02 00:27:46 crc kubenswrapper[4856]: I1202 00:27:46.227829 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-4ztk7_93046834-963c-4132-a184-d9541f761870/kube-rbac-proxy/0.log" Dec 02 00:27:46 crc kubenswrapper[4856]: I1202 00:27:46.240830 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-4ztk7_93046834-963c-4132-a184-d9541f761870/machine-api-operator/0.log" Dec 02 00:27:57 crc kubenswrapper[4856]: I1202 00:27:57.716981 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-bntkc_65ef0d24-261e-488f-ad30-35ae9ef4a68a/cert-manager-controller/0.log" Dec 02 00:27:57 crc kubenswrapper[4856]: I1202 00:27:57.803798 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-gxmkb_d75f8753-c27e-49df-8086-66f21b91d98a/cert-manager-cainjector/0.log" Dec 02 00:27:57 crc kubenswrapper[4856]: I1202 00:27:57.901566 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-mpnvc_c5f791d3-d7d5-4523-a510-1a73220082dd/cert-manager-webhook/0.log" Dec 02 00:28:05 crc kubenswrapper[4856]: I1202 00:28:05.061507 4856 patch_prober.go:28] interesting pod/machine-config-daemon-455ww container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 00:28:05 crc kubenswrapper[4856]: I1202 00:28:05.061919 4856 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 00:28:12 crc kubenswrapper[4856]: I1202 00:28:12.358889 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv_fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d/util/0.log" Dec 02 00:28:12 crc kubenswrapper[4856]: I1202 00:28:12.564554 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv_fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d/util/0.log" Dec 02 00:28:12 crc kubenswrapper[4856]: I1202 00:28:12.582941 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv_fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d/pull/0.log" Dec 02 00:28:12 crc kubenswrapper[4856]: I1202 00:28:12.592565 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv_fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d/pull/0.log" Dec 02 00:28:12 crc kubenswrapper[4856]: I1202 00:28:12.729741 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv_fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d/extract/0.log" Dec 02 00:28:12 crc kubenswrapper[4856]: I1202 00:28:12.733597 4856 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv_fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d/util/0.log" Dec 02 00:28:12 crc kubenswrapper[4856]: I1202 00:28:12.753951 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a5bxzv_fbd7ebc9-491d-4e80-a331-c76c0e0ebd9d/pull/0.log" Dec 02 00:28:12 crc kubenswrapper[4856]: I1202 00:28:12.994075 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm_1fd9f804-c0c5-4f64-8328-30f1e6b25a98/util/0.log" Dec 02 00:28:13 crc kubenswrapper[4856]: I1202 00:28:13.034656 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm_1fd9f804-c0c5-4f64-8328-30f1e6b25a98/util/0.log" Dec 02 00:28:13 crc kubenswrapper[4856]: I1202 00:28:13.060304 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm_1fd9f804-c0c5-4f64-8328-30f1e6b25a98/pull/0.log" Dec 02 00:28:13 crc kubenswrapper[4856]: I1202 00:28:13.140129 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm_1fd9f804-c0c5-4f64-8328-30f1e6b25a98/pull/0.log" Dec 02 00:28:13 crc kubenswrapper[4856]: I1202 00:28:13.296920 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm_1fd9f804-c0c5-4f64-8328-30f1e6b25a98/pull/0.log" Dec 02 00:28:13 crc kubenswrapper[4856]: I1202 00:28:13.297208 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm_1fd9f804-c0c5-4f64-8328-30f1e6b25a98/extract/0.log" Dec 02 00:28:13 crc kubenswrapper[4856]: I1202 00:28:13.319024 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210rxfdm_1fd9f804-c0c5-4f64-8328-30f1e6b25a98/util/0.log" Dec 02 00:28:13 crc kubenswrapper[4856]: I1202 00:28:13.445875 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5_d110afca-827d-4691-9e3a-5804705da959/util/0.log" Dec 02 00:28:13 crc kubenswrapper[4856]: I1202 00:28:13.601660 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5_d110afca-827d-4691-9e3a-5804705da959/util/0.log" Dec 02 00:28:13 crc kubenswrapper[4856]: I1202 00:28:13.611333 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5_d110afca-827d-4691-9e3a-5804705da959/pull/0.log" Dec 02 00:28:13 crc kubenswrapper[4856]: I1202 00:28:13.616213 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5_d110afca-827d-4691-9e3a-5804705da959/pull/0.log" Dec 02 00:28:13 crc kubenswrapper[4856]: I1202 00:28:13.747363 4856 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5_d110afca-827d-4691-9e3a-5804705da959/extract/0.log" Dec 02 00:28:13 crc kubenswrapper[4856]: I1202 00:28:13.749701 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5_d110afca-827d-4691-9e3a-5804705da959/util/0.log" Dec 02 00:28:13 crc kubenswrapper[4856]: I1202 00:28:13.770387 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f5rzg5_d110afca-827d-4691-9e3a-5804705da959/pull/0.log" Dec 02 00:28:13 crc kubenswrapper[4856]: I1202 00:28:13.892051 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf_dfd38b07-2dea-4521-a264-3bf95800aad7/util/0.log" Dec 02 00:28:14 crc kubenswrapper[4856]: I1202 00:28:14.055781 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf_dfd38b07-2dea-4521-a264-3bf95800aad7/util/0.log" Dec 02 00:28:14 crc kubenswrapper[4856]: I1202 00:28:14.069679 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf_dfd38b07-2dea-4521-a264-3bf95800aad7/pull/0.log" Dec 02 00:28:14 crc kubenswrapper[4856]: I1202 00:28:14.090690 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf_dfd38b07-2dea-4521-a264-3bf95800aad7/pull/0.log" Dec 02 00:28:14 crc kubenswrapper[4856]: I1202 00:28:14.243549 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf_dfd38b07-2dea-4521-a264-3bf95800aad7/util/0.log" Dec 02 00:28:14 crc kubenswrapper[4856]: I1202 00:28:14.270639 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf_dfd38b07-2dea-4521-a264-3bf95800aad7/extract/0.log" Dec 02 00:28:14 crc kubenswrapper[4856]: I1202 00:28:14.270747 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5exsdrf_dfd38b07-2dea-4521-a264-3bf95800aad7/pull/0.log" Dec 02 00:28:14 crc kubenswrapper[4856]: I1202 00:28:14.397136 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bgmrg_96d0fbac-8ecc-490b-a58e-0dfb4303f2b8/extract-utilities/0.log" Dec 02 00:28:14 crc kubenswrapper[4856]: I1202 00:28:14.551215 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bgmrg_96d0fbac-8ecc-490b-a58e-0dfb4303f2b8/extract-content/0.log" Dec 02 00:28:14 crc kubenswrapper[4856]: I1202 00:28:14.556004 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bgmrg_96d0fbac-8ecc-490b-a58e-0dfb4303f2b8/extract-utilities/0.log" Dec 02 00:28:14 crc kubenswrapper[4856]: I1202 00:28:14.558307 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bgmrg_96d0fbac-8ecc-490b-a58e-0dfb4303f2b8/extract-content/0.log" Dec 02 00:28:14 crc kubenswrapper[4856]: I1202 00:28:14.737828 4856 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bgmrg_96d0fbac-8ecc-490b-a58e-0dfb4303f2b8/extract-utilities/0.log" Dec 02 00:28:14 crc kubenswrapper[4856]: I1202 00:28:14.755057 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bgmrg_96d0fbac-8ecc-490b-a58e-0dfb4303f2b8/extract-content/0.log" Dec 02 00:28:14 crc kubenswrapper[4856]: I1202 00:28:14.905980 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zbmx9_c3c49f31-0b5f-4eff-8d85-b4296f173280/extract-utilities/0.log" Dec 02 00:28:15 crc kubenswrapper[4856]: I1202 00:28:15.088331 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bgmrg_96d0fbac-8ecc-490b-a58e-0dfb4303f2b8/registry-server/0.log" Dec 02 00:28:15 crc kubenswrapper[4856]: I1202 00:28:15.159726 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zbmx9_c3c49f31-0b5f-4eff-8d85-b4296f173280/extract-content/0.log" Dec 02 00:28:15 crc kubenswrapper[4856]: I1202 00:28:15.173013 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zbmx9_c3c49f31-0b5f-4eff-8d85-b4296f173280/extract-content/0.log" Dec 02 00:28:15 crc kubenswrapper[4856]: I1202 00:28:15.173125 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zbmx9_c3c49f31-0b5f-4eff-8d85-b4296f173280/extract-utilities/0.log" Dec 02 00:28:15 crc kubenswrapper[4856]: I1202 00:28:15.296762 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zbmx9_c3c49f31-0b5f-4eff-8d85-b4296f173280/extract-utilities/0.log" Dec 02 00:28:15 crc kubenswrapper[4856]: I1202 00:28:15.356601 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zbmx9_c3c49f31-0b5f-4eff-8d85-b4296f173280/extract-content/0.log" Dec 02 00:28:15 crc kubenswrapper[4856]: I1202 00:28:15.498978 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zbmx9_c3c49f31-0b5f-4eff-8d85-b4296f173280/registry-server/0.log" Dec 02 00:28:15 crc kubenswrapper[4856]: I1202 00:28:15.504934 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-7r74k_d33333ae-222b-4e6a-9c34-279172c4f292/marketplace-operator/0.log" Dec 02 00:28:15 crc kubenswrapper[4856]: I1202 00:28:15.572477 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-lfjkr_ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46/extract-utilities/0.log" Dec 02 00:28:15 crc kubenswrapper[4856]: I1202 00:28:15.752290 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-lfjkr_ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46/extract-content/0.log" Dec 02 00:28:15 crc kubenswrapper[4856]: I1202 00:28:15.752994 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-lfjkr_ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46/extract-content/0.log" Dec 02 00:28:15 crc kubenswrapper[4856]: I1202 00:28:15.780093 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-lfjkr_ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46/extract-utilities/0.log" Dec 02 00:28:15 crc kubenswrapper[4856]: I1202 00:28:15.885141 4856 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-marketplace_redhat-operators-lfjkr_ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46/extract-content/0.log" Dec 02 00:28:15 crc kubenswrapper[4856]: I1202 00:28:15.894463 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-lfjkr_ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46/extract-utilities/0.log" Dec 02 00:28:16 crc kubenswrapper[4856]: I1202 00:28:16.164518 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-lfjkr_ff3ec1c6-ecaf-4d6b-9fa4-ad20e2698f46/registry-server/0.log" Dec 02 00:28:27 crc kubenswrapper[4856]: I1202 00:28:27.333975 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-j272l_c56dba5d-a93c-45fb-8495-846b9098c7d1/prometheus-operator/0.log" Dec 02 00:28:27 crc kubenswrapper[4856]: I1202 00:28:27.532096 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-f5c4ddff-6hkks_edfb110d-4069-42b8-a8bf-3cf5a74ba610/prometheus-operator-admission-webhook/0.log" Dec 02 00:28:27 crc kubenswrapper[4856]: I1202 00:28:27.559461 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-f5c4ddff-v5dqs_6906a6f6-08a2-4a23-b609-4b3f37976695/prometheus-operator-admission-webhook/0.log" Dec 02 00:28:27 crc kubenswrapper[4856]: I1202 00:28:27.731624 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-2tlhq_35c7073d-b173-4200-a0fe-5df05f791e68/operator/0.log" Dec 02 00:28:27 crc kubenswrapper[4856]: I1202 00:28:27.748563 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-wz88m_909dd6ee-af48-452f-8de5-73fe740e006b/perses-operator/0.log" Dec 02 00:28:35 crc kubenswrapper[4856]: I1202 00:28:35.061566 4856 patch_prober.go:28] interesting pod/machine-config-daemon-455ww container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 00:28:35 crc kubenswrapper[4856]: I1202 00:28:35.062124 4856 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 00:28:54 crc kubenswrapper[4856]: I1202 00:28:54.174735 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qrxqq"] Dec 02 00:28:54 crc kubenswrapper[4856]: I1202 00:28:54.176617 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qrxqq" Dec 02 00:28:54 crc kubenswrapper[4856]: I1202 00:28:54.191656 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qrxqq"] Dec 02 00:28:54 crc kubenswrapper[4856]: I1202 00:28:54.244538 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b544b\" (UniqueName: \"kubernetes.io/projected/cc00c39b-8271-4581-8a1e-9fcfe32fdac1-kube-api-access-b544b\") pod \"certified-operators-qrxqq\" (UID: \"cc00c39b-8271-4581-8a1e-9fcfe32fdac1\") " pod="openshift-marketplace/certified-operators-qrxqq" Dec 02 00:28:54 crc kubenswrapper[4856]: I1202 00:28:54.244778 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc00c39b-8271-4581-8a1e-9fcfe32fdac1-utilities\") pod \"certified-operators-qrxqq\" (UID: \"cc00c39b-8271-4581-8a1e-9fcfe32fdac1\") " pod="openshift-marketplace/certified-operators-qrxqq" Dec 02 00:28:54 crc kubenswrapper[4856]: I1202 00:28:54.244827 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc00c39b-8271-4581-8a1e-9fcfe32fdac1-catalog-content\") pod \"certified-operators-qrxqq\" (UID: \"cc00c39b-8271-4581-8a1e-9fcfe32fdac1\") " pod="openshift-marketplace/certified-operators-qrxqq" Dec 02 00:28:54 crc kubenswrapper[4856]: I1202 00:28:54.345919 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc00c39b-8271-4581-8a1e-9fcfe32fdac1-utilities\") pod \"certified-operators-qrxqq\" (UID: \"cc00c39b-8271-4581-8a1e-9fcfe32fdac1\") " pod="openshift-marketplace/certified-operators-qrxqq" Dec 02 00:28:54 crc kubenswrapper[4856]: I1202 00:28:54.346008 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc00c39b-8271-4581-8a1e-9fcfe32fdac1-catalog-content\") pod \"certified-operators-qrxqq\" (UID: \"cc00c39b-8271-4581-8a1e-9fcfe32fdac1\") " pod="openshift-marketplace/certified-operators-qrxqq" Dec 02 00:28:54 crc kubenswrapper[4856]: I1202 00:28:54.346048 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b544b\" (UniqueName: \"kubernetes.io/projected/cc00c39b-8271-4581-8a1e-9fcfe32fdac1-kube-api-access-b544b\") pod \"certified-operators-qrxqq\" (UID: \"cc00c39b-8271-4581-8a1e-9fcfe32fdac1\") " pod="openshift-marketplace/certified-operators-qrxqq" Dec 02 00:28:54 crc kubenswrapper[4856]: I1202 00:28:54.346750 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc00c39b-8271-4581-8a1e-9fcfe32fdac1-catalog-content\") pod \"certified-operators-qrxqq\" (UID: \"cc00c39b-8271-4581-8a1e-9fcfe32fdac1\") " pod="openshift-marketplace/certified-operators-qrxqq" Dec 02 00:28:54 crc kubenswrapper[4856]: I1202 00:28:54.347294 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc00c39b-8271-4581-8a1e-9fcfe32fdac1-utilities\") pod \"certified-operators-qrxqq\" (UID: \"cc00c39b-8271-4581-8a1e-9fcfe32fdac1\") " pod="openshift-marketplace/certified-operators-qrxqq" Dec 02 00:28:54 crc kubenswrapper[4856]: I1202 00:28:54.372242 4856 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-b544b\" (UniqueName: \"kubernetes.io/projected/cc00c39b-8271-4581-8a1e-9fcfe32fdac1-kube-api-access-b544b\") pod \"certified-operators-qrxqq\" (UID: \"cc00c39b-8271-4581-8a1e-9fcfe32fdac1\") " pod="openshift-marketplace/certified-operators-qrxqq" Dec 02 00:28:54 crc kubenswrapper[4856]: I1202 00:28:54.499126 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qrxqq" Dec 02 00:28:54 crc kubenswrapper[4856]: I1202 00:28:54.943460 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qrxqq"] Dec 02 00:28:55 crc kubenswrapper[4856]: I1202 00:28:55.009697 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrxqq" event={"ID":"cc00c39b-8271-4581-8a1e-9fcfe32fdac1","Type":"ContainerStarted","Data":"675c82f7b5f939c422efbbf39542f5f25528af4812e6bb66e8d472a8673ed493"} Dec 02 00:28:56 crc kubenswrapper[4856]: I1202 00:28:56.031229 4856 generic.go:334] "Generic (PLEG): container finished" podID="cc00c39b-8271-4581-8a1e-9fcfe32fdac1" containerID="d7469c5c41fc49d103152b9d9504fa05ccf979a167d4e6a4e209c0eec65162f7" exitCode=0 Dec 02 00:28:56 crc kubenswrapper[4856]: I1202 00:28:56.032168 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrxqq" event={"ID":"cc00c39b-8271-4581-8a1e-9fcfe32fdac1","Type":"ContainerDied","Data":"d7469c5c41fc49d103152b9d9504fa05ccf979a167d4e6a4e209c0eec65162f7"} Dec 02 00:28:56 crc kubenswrapper[4856]: I1202 00:28:56.034544 4856 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 00:28:57 crc kubenswrapper[4856]: I1202 00:28:57.042127 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrxqq" event={"ID":"cc00c39b-8271-4581-8a1e-9fcfe32fdac1","Type":"ContainerStarted","Data":"a1c4dbbab157e2f6e1bd64bc70ea906444506de6f59644a27da89a44a84a9c5e"} Dec 02 00:28:58 crc kubenswrapper[4856]: I1202 00:28:58.061120 4856 generic.go:334] "Generic (PLEG): container finished" podID="cc00c39b-8271-4581-8a1e-9fcfe32fdac1" containerID="a1c4dbbab157e2f6e1bd64bc70ea906444506de6f59644a27da89a44a84a9c5e" exitCode=0 Dec 02 00:28:58 crc kubenswrapper[4856]: I1202 00:28:58.061165 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrxqq" event={"ID":"cc00c39b-8271-4581-8a1e-9fcfe32fdac1","Type":"ContainerDied","Data":"a1c4dbbab157e2f6e1bd64bc70ea906444506de6f59644a27da89a44a84a9c5e"} Dec 02 00:28:59 crc kubenswrapper[4856]: I1202 00:28:59.072307 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrxqq" event={"ID":"cc00c39b-8271-4581-8a1e-9fcfe32fdac1","Type":"ContainerStarted","Data":"9dd97f7b6929a5e28a16ec590c83418dede6e94175c86aa58e8fe0857bbf073f"} Dec 02 00:28:59 crc kubenswrapper[4856]: I1202 00:28:59.099630 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qrxqq" podStartSLOduration=2.62302328 podStartE2EDuration="5.099612256s" podCreationTimestamp="2025-12-02 00:28:54 +0000 UTC" firstStartedPulling="2025-12-02 00:28:56.034333938 +0000 UTC m=+1363.060701942" lastFinishedPulling="2025-12-02 00:28:58.510922904 +0000 UTC m=+1365.537290918" observedRunningTime="2025-12-02 00:28:59.094862228 +0000 UTC m=+1366.121230242" watchObservedRunningTime="2025-12-02 
00:28:59.099612256 +0000 UTC m=+1366.125980260" Dec 02 00:29:04 crc kubenswrapper[4856]: I1202 00:29:04.499477 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qrxqq" Dec 02 00:29:04 crc kubenswrapper[4856]: I1202 00:29:04.500368 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qrxqq" Dec 02 00:29:04 crc kubenswrapper[4856]: I1202 00:29:04.566772 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qrxqq" Dec 02 00:29:05 crc kubenswrapper[4856]: I1202 00:29:05.061689 4856 patch_prober.go:28] interesting pod/machine-config-daemon-455ww container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 00:29:05 crc kubenswrapper[4856]: I1202 00:29:05.061759 4856 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 00:29:05 crc kubenswrapper[4856]: I1202 00:29:05.061803 4856 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-455ww" Dec 02 00:29:05 crc kubenswrapper[4856]: I1202 00:29:05.062326 4856 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fb7ff0312f1383e8fbf6b0241feb022dd38d0b629331ff47c6869482a6ef16cb"} pod="openshift-machine-config-operator/machine-config-daemon-455ww" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 00:29:05 crc kubenswrapper[4856]: I1202 00:29:05.062380 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" containerID="cri-o://fb7ff0312f1383e8fbf6b0241feb022dd38d0b629331ff47c6869482a6ef16cb" gracePeriod=600 Dec 02 00:29:05 crc kubenswrapper[4856]: I1202 00:29:05.157916 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qrxqq" Dec 02 00:29:05 crc kubenswrapper[4856]: I1202 00:29:05.200671 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qrxqq"] Dec 02 00:29:06 crc kubenswrapper[4856]: I1202 00:29:06.128505 4856 generic.go:334] "Generic (PLEG): container finished" podID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerID="fb7ff0312f1383e8fbf6b0241feb022dd38d0b629331ff47c6869482a6ef16cb" exitCode=0 Dec 02 00:29:06 crc kubenswrapper[4856]: I1202 00:29:06.128774 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" event={"ID":"0271f00d-b420-4dee-aa8b-92d6fc294b2a","Type":"ContainerDied","Data":"fb7ff0312f1383e8fbf6b0241feb022dd38d0b629331ff47c6869482a6ef16cb"} Dec 02 00:29:06 crc kubenswrapper[4856]: I1202 00:29:06.129076 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" 
event={"ID":"0271f00d-b420-4dee-aa8b-92d6fc294b2a","Type":"ContainerStarted","Data":"157d719dc098f8c216b760d10c270d947a725276a55e09b4f55e821566cfc9ee"} Dec 02 00:29:06 crc kubenswrapper[4856]: I1202 00:29:06.129116 4856 scope.go:117] "RemoveContainer" containerID="29e2f8d4696162c413a025531de65a7fa0caff678050b80073dc7b7e99207924" Dec 02 00:29:07 crc kubenswrapper[4856]: I1202 00:29:07.136550 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qrxqq" podUID="cc00c39b-8271-4581-8a1e-9fcfe32fdac1" containerName="registry-server" containerID="cri-o://9dd97f7b6929a5e28a16ec590c83418dede6e94175c86aa58e8fe0857bbf073f" gracePeriod=2 Dec 02 00:29:07 crc kubenswrapper[4856]: I1202 00:29:07.657115 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qrxqq" Dec 02 00:29:07 crc kubenswrapper[4856]: I1202 00:29:07.745961 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc00c39b-8271-4581-8a1e-9fcfe32fdac1-utilities\") pod \"cc00c39b-8271-4581-8a1e-9fcfe32fdac1\" (UID: \"cc00c39b-8271-4581-8a1e-9fcfe32fdac1\") " Dec 02 00:29:07 crc kubenswrapper[4856]: I1202 00:29:07.746193 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b544b\" (UniqueName: \"kubernetes.io/projected/cc00c39b-8271-4581-8a1e-9fcfe32fdac1-kube-api-access-b544b\") pod \"cc00c39b-8271-4581-8a1e-9fcfe32fdac1\" (UID: \"cc00c39b-8271-4581-8a1e-9fcfe32fdac1\") " Dec 02 00:29:07 crc kubenswrapper[4856]: I1202 00:29:07.746273 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc00c39b-8271-4581-8a1e-9fcfe32fdac1-catalog-content\") pod \"cc00c39b-8271-4581-8a1e-9fcfe32fdac1\" (UID: \"cc00c39b-8271-4581-8a1e-9fcfe32fdac1\") " Dec 02 00:29:07 crc kubenswrapper[4856]: I1202 00:29:07.765142 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc00c39b-8271-4581-8a1e-9fcfe32fdac1-utilities" (OuterVolumeSpecName: "utilities") pod "cc00c39b-8271-4581-8a1e-9fcfe32fdac1" (UID: "cc00c39b-8271-4581-8a1e-9fcfe32fdac1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:29:07 crc kubenswrapper[4856]: I1202 00:29:07.769677 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc00c39b-8271-4581-8a1e-9fcfe32fdac1-kube-api-access-b544b" (OuterVolumeSpecName: "kube-api-access-b544b") pod "cc00c39b-8271-4581-8a1e-9fcfe32fdac1" (UID: "cc00c39b-8271-4581-8a1e-9fcfe32fdac1"). InnerVolumeSpecName "kube-api-access-b544b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:29:07 crc kubenswrapper[4856]: I1202 00:29:07.848011 4856 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc00c39b-8271-4581-8a1e-9fcfe32fdac1-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 00:29:07 crc kubenswrapper[4856]: I1202 00:29:07.848057 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b544b\" (UniqueName: \"kubernetes.io/projected/cc00c39b-8271-4581-8a1e-9fcfe32fdac1-kube-api-access-b544b\") on node \"crc\" DevicePath \"\"" Dec 02 00:29:07 crc kubenswrapper[4856]: I1202 00:29:07.870498 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc00c39b-8271-4581-8a1e-9fcfe32fdac1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cc00c39b-8271-4581-8a1e-9fcfe32fdac1" (UID: "cc00c39b-8271-4581-8a1e-9fcfe32fdac1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:29:07 crc kubenswrapper[4856]: I1202 00:29:07.949233 4856 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc00c39b-8271-4581-8a1e-9fcfe32fdac1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 00:29:08 crc kubenswrapper[4856]: I1202 00:29:08.145802 4856 generic.go:334] "Generic (PLEG): container finished" podID="cc00c39b-8271-4581-8a1e-9fcfe32fdac1" containerID="9dd97f7b6929a5e28a16ec590c83418dede6e94175c86aa58e8fe0857bbf073f" exitCode=0 Dec 02 00:29:08 crc kubenswrapper[4856]: I1202 00:29:08.145850 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrxqq" event={"ID":"cc00c39b-8271-4581-8a1e-9fcfe32fdac1","Type":"ContainerDied","Data":"9dd97f7b6929a5e28a16ec590c83418dede6e94175c86aa58e8fe0857bbf073f"} Dec 02 00:29:08 crc kubenswrapper[4856]: I1202 00:29:08.145880 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrxqq" event={"ID":"cc00c39b-8271-4581-8a1e-9fcfe32fdac1","Type":"ContainerDied","Data":"675c82f7b5f939c422efbbf39542f5f25528af4812e6bb66e8d472a8673ed493"} Dec 02 00:29:08 crc kubenswrapper[4856]: I1202 00:29:08.145885 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qrxqq" Dec 02 00:29:08 crc kubenswrapper[4856]: I1202 00:29:08.145899 4856 scope.go:117] "RemoveContainer" containerID="9dd97f7b6929a5e28a16ec590c83418dede6e94175c86aa58e8fe0857bbf073f" Dec 02 00:29:08 crc kubenswrapper[4856]: I1202 00:29:08.161881 4856 scope.go:117] "RemoveContainer" containerID="a1c4dbbab157e2f6e1bd64bc70ea906444506de6f59644a27da89a44a84a9c5e" Dec 02 00:29:08 crc kubenswrapper[4856]: I1202 00:29:08.179040 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qrxqq"] Dec 02 00:29:08 crc kubenswrapper[4856]: I1202 00:29:08.189365 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qrxqq"] Dec 02 00:29:08 crc kubenswrapper[4856]: I1202 00:29:08.192613 4856 scope.go:117] "RemoveContainer" containerID="d7469c5c41fc49d103152b9d9504fa05ccf979a167d4e6a4e209c0eec65162f7" Dec 02 00:29:08 crc kubenswrapper[4856]: I1202 00:29:08.213918 4856 scope.go:117] "RemoveContainer" containerID="9dd97f7b6929a5e28a16ec590c83418dede6e94175c86aa58e8fe0857bbf073f" Dec 02 00:29:08 crc kubenswrapper[4856]: E1202 00:29:08.214378 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9dd97f7b6929a5e28a16ec590c83418dede6e94175c86aa58e8fe0857bbf073f\": container with ID starting with 9dd97f7b6929a5e28a16ec590c83418dede6e94175c86aa58e8fe0857bbf073f not found: ID does not exist" containerID="9dd97f7b6929a5e28a16ec590c83418dede6e94175c86aa58e8fe0857bbf073f" Dec 02 00:29:08 crc kubenswrapper[4856]: I1202 00:29:08.214527 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9dd97f7b6929a5e28a16ec590c83418dede6e94175c86aa58e8fe0857bbf073f"} err="failed to get container status \"9dd97f7b6929a5e28a16ec590c83418dede6e94175c86aa58e8fe0857bbf073f\": rpc error: code = NotFound desc = could not find container \"9dd97f7b6929a5e28a16ec590c83418dede6e94175c86aa58e8fe0857bbf073f\": container with ID starting with 9dd97f7b6929a5e28a16ec590c83418dede6e94175c86aa58e8fe0857bbf073f not found: ID does not exist" Dec 02 00:29:08 crc kubenswrapper[4856]: I1202 00:29:08.214639 4856 scope.go:117] "RemoveContainer" containerID="a1c4dbbab157e2f6e1bd64bc70ea906444506de6f59644a27da89a44a84a9c5e" Dec 02 00:29:08 crc kubenswrapper[4856]: E1202 00:29:08.214914 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1c4dbbab157e2f6e1bd64bc70ea906444506de6f59644a27da89a44a84a9c5e\": container with ID starting with a1c4dbbab157e2f6e1bd64bc70ea906444506de6f59644a27da89a44a84a9c5e not found: ID does not exist" containerID="a1c4dbbab157e2f6e1bd64bc70ea906444506de6f59644a27da89a44a84a9c5e" Dec 02 00:29:08 crc kubenswrapper[4856]: I1202 00:29:08.215022 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1c4dbbab157e2f6e1bd64bc70ea906444506de6f59644a27da89a44a84a9c5e"} err="failed to get container status \"a1c4dbbab157e2f6e1bd64bc70ea906444506de6f59644a27da89a44a84a9c5e\": rpc error: code = NotFound desc = could not find container \"a1c4dbbab157e2f6e1bd64bc70ea906444506de6f59644a27da89a44a84a9c5e\": container with ID starting with a1c4dbbab157e2f6e1bd64bc70ea906444506de6f59644a27da89a44a84a9c5e not found: ID does not exist" Dec 02 00:29:08 crc kubenswrapper[4856]: I1202 00:29:08.215122 4856 scope.go:117] "RemoveContainer" 
containerID="d7469c5c41fc49d103152b9d9504fa05ccf979a167d4e6a4e209c0eec65162f7" Dec 02 00:29:08 crc kubenswrapper[4856]: E1202 00:29:08.216800 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7469c5c41fc49d103152b9d9504fa05ccf979a167d4e6a4e209c0eec65162f7\": container with ID starting with d7469c5c41fc49d103152b9d9504fa05ccf979a167d4e6a4e209c0eec65162f7 not found: ID does not exist" containerID="d7469c5c41fc49d103152b9d9504fa05ccf979a167d4e6a4e209c0eec65162f7" Dec 02 00:29:08 crc kubenswrapper[4856]: I1202 00:29:08.216845 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7469c5c41fc49d103152b9d9504fa05ccf979a167d4e6a4e209c0eec65162f7"} err="failed to get container status \"d7469c5c41fc49d103152b9d9504fa05ccf979a167d4e6a4e209c0eec65162f7\": rpc error: code = NotFound desc = could not find container \"d7469c5c41fc49d103152b9d9504fa05ccf979a167d4e6a4e209c0eec65162f7\": container with ID starting with d7469c5c41fc49d103152b9d9504fa05ccf979a167d4e6a4e209c0eec65162f7 not found: ID does not exist" Dec 02 00:29:09 crc kubenswrapper[4856]: I1202 00:29:09.267198 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc00c39b-8271-4581-8a1e-9fcfe32fdac1" path="/var/lib/kubelet/pods/cc00c39b-8271-4581-8a1e-9fcfe32fdac1/volumes" Dec 02 00:29:13 crc kubenswrapper[4856]: I1202 00:29:13.182974 4856 generic.go:334] "Generic (PLEG): container finished" podID="d9038c84-339d-41f3-94d9-8a2dc1d0cc66" containerID="9af996e2e901c48220f23ba8b5972342e64f7865d8f1f7dae89a82b9a7bdf339" exitCode=0 Dec 02 00:29:13 crc kubenswrapper[4856]: I1202 00:29:13.183017 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pdhrp/must-gather-cq929" event={"ID":"d9038c84-339d-41f3-94d9-8a2dc1d0cc66","Type":"ContainerDied","Data":"9af996e2e901c48220f23ba8b5972342e64f7865d8f1f7dae89a82b9a7bdf339"} Dec 02 00:29:13 crc kubenswrapper[4856]: I1202 00:29:13.183911 4856 scope.go:117] "RemoveContainer" containerID="9af996e2e901c48220f23ba8b5972342e64f7865d8f1f7dae89a82b9a7bdf339" Dec 02 00:29:13 crc kubenswrapper[4856]: I1202 00:29:13.798488 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-pdhrp_must-gather-cq929_d9038c84-339d-41f3-94d9-8a2dc1d0cc66/gather/0.log" Dec 02 00:29:14 crc kubenswrapper[4856]: E1202 00:29:14.944856 4856 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.222:33512->38.102.83.222:41585: write tcp 38.102.83.222:33512->38.102.83.222:41585: write: broken pipe Dec 02 00:29:20 crc kubenswrapper[4856]: I1202 00:29:20.135462 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-pdhrp/must-gather-cq929"] Dec 02 00:29:20 crc kubenswrapper[4856]: I1202 00:29:20.136757 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-pdhrp/must-gather-cq929" podUID="d9038c84-339d-41f3-94d9-8a2dc1d0cc66" containerName="copy" containerID="cri-o://2801867f31a33b2143c3f6318d9995950e21d9b115e4d2f2d11cd383a0fe06d9" gracePeriod=2 Dec 02 00:29:20 crc kubenswrapper[4856]: I1202 00:29:20.139319 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-pdhrp/must-gather-cq929"] Dec 02 00:29:20 crc kubenswrapper[4856]: I1202 00:29:20.555436 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-pdhrp_must-gather-cq929_d9038c84-339d-41f3-94d9-8a2dc1d0cc66/copy/0.log" Dec 02 
00:29:20 crc kubenswrapper[4856]: I1202 00:29:20.556312 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pdhrp/must-gather-cq929" Dec 02 00:29:20 crc kubenswrapper[4856]: I1202 00:29:20.636865 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8s9t2\" (UniqueName: \"kubernetes.io/projected/d9038c84-339d-41f3-94d9-8a2dc1d0cc66-kube-api-access-8s9t2\") pod \"d9038c84-339d-41f3-94d9-8a2dc1d0cc66\" (UID: \"d9038c84-339d-41f3-94d9-8a2dc1d0cc66\") " Dec 02 00:29:20 crc kubenswrapper[4856]: I1202 00:29:20.636972 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d9038c84-339d-41f3-94d9-8a2dc1d0cc66-must-gather-output\") pod \"d9038c84-339d-41f3-94d9-8a2dc1d0cc66\" (UID: \"d9038c84-339d-41f3-94d9-8a2dc1d0cc66\") " Dec 02 00:29:20 crc kubenswrapper[4856]: I1202 00:29:20.643508 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9038c84-339d-41f3-94d9-8a2dc1d0cc66-kube-api-access-8s9t2" (OuterVolumeSpecName: "kube-api-access-8s9t2") pod "d9038c84-339d-41f3-94d9-8a2dc1d0cc66" (UID: "d9038c84-339d-41f3-94d9-8a2dc1d0cc66"). InnerVolumeSpecName "kube-api-access-8s9t2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:29:20 crc kubenswrapper[4856]: I1202 00:29:20.699989 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9038c84-339d-41f3-94d9-8a2dc1d0cc66-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "d9038c84-339d-41f3-94d9-8a2dc1d0cc66" (UID: "d9038c84-339d-41f3-94d9-8a2dc1d0cc66"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:29:20 crc kubenswrapper[4856]: I1202 00:29:20.738889 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8s9t2\" (UniqueName: \"kubernetes.io/projected/d9038c84-339d-41f3-94d9-8a2dc1d0cc66-kube-api-access-8s9t2\") on node \"crc\" DevicePath \"\"" Dec 02 00:29:20 crc kubenswrapper[4856]: I1202 00:29:20.739339 4856 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d9038c84-339d-41f3-94d9-8a2dc1d0cc66-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 02 00:29:21 crc kubenswrapper[4856]: I1202 00:29:21.242191 4856 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-pdhrp_must-gather-cq929_d9038c84-339d-41f3-94d9-8a2dc1d0cc66/copy/0.log" Dec 02 00:29:21 crc kubenswrapper[4856]: I1202 00:29:21.244321 4856 generic.go:334] "Generic (PLEG): container finished" podID="d9038c84-339d-41f3-94d9-8a2dc1d0cc66" containerID="2801867f31a33b2143c3f6318d9995950e21d9b115e4d2f2d11cd383a0fe06d9" exitCode=143 Dec 02 00:29:21 crc kubenswrapper[4856]: I1202 00:29:21.244384 4856 scope.go:117] "RemoveContainer" containerID="2801867f31a33b2143c3f6318d9995950e21d9b115e4d2f2d11cd383a0fe06d9" Dec 02 00:29:21 crc kubenswrapper[4856]: I1202 00:29:21.244434 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-pdhrp/must-gather-cq929" Dec 02 00:29:21 crc kubenswrapper[4856]: I1202 00:29:21.268462 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9038c84-339d-41f3-94d9-8a2dc1d0cc66" path="/var/lib/kubelet/pods/d9038c84-339d-41f3-94d9-8a2dc1d0cc66/volumes" Dec 02 00:29:21 crc kubenswrapper[4856]: I1202 00:29:21.273605 4856 scope.go:117] "RemoveContainer" containerID="9af996e2e901c48220f23ba8b5972342e64f7865d8f1f7dae89a82b9a7bdf339" Dec 02 00:29:21 crc kubenswrapper[4856]: I1202 00:29:21.315363 4856 scope.go:117] "RemoveContainer" containerID="2801867f31a33b2143c3f6318d9995950e21d9b115e4d2f2d11cd383a0fe06d9" Dec 02 00:29:21 crc kubenswrapper[4856]: E1202 00:29:21.316292 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2801867f31a33b2143c3f6318d9995950e21d9b115e4d2f2d11cd383a0fe06d9\": container with ID starting with 2801867f31a33b2143c3f6318d9995950e21d9b115e4d2f2d11cd383a0fe06d9 not found: ID does not exist" containerID="2801867f31a33b2143c3f6318d9995950e21d9b115e4d2f2d11cd383a0fe06d9" Dec 02 00:29:21 crc kubenswrapper[4856]: I1202 00:29:21.316333 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2801867f31a33b2143c3f6318d9995950e21d9b115e4d2f2d11cd383a0fe06d9"} err="failed to get container status \"2801867f31a33b2143c3f6318d9995950e21d9b115e4d2f2d11cd383a0fe06d9\": rpc error: code = NotFound desc = could not find container \"2801867f31a33b2143c3f6318d9995950e21d9b115e4d2f2d11cd383a0fe06d9\": container with ID starting with 2801867f31a33b2143c3f6318d9995950e21d9b115e4d2f2d11cd383a0fe06d9 not found: ID does not exist" Dec 02 00:29:21 crc kubenswrapper[4856]: I1202 00:29:21.316365 4856 scope.go:117] "RemoveContainer" containerID="9af996e2e901c48220f23ba8b5972342e64f7865d8f1f7dae89a82b9a7bdf339" Dec 02 00:29:21 crc kubenswrapper[4856]: E1202 00:29:21.316941 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9af996e2e901c48220f23ba8b5972342e64f7865d8f1f7dae89a82b9a7bdf339\": container with ID starting with 9af996e2e901c48220f23ba8b5972342e64f7865d8f1f7dae89a82b9a7bdf339 not found: ID does not exist" containerID="9af996e2e901c48220f23ba8b5972342e64f7865d8f1f7dae89a82b9a7bdf339" Dec 02 00:29:21 crc kubenswrapper[4856]: I1202 00:29:21.316998 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9af996e2e901c48220f23ba8b5972342e64f7865d8f1f7dae89a82b9a7bdf339"} err="failed to get container status \"9af996e2e901c48220f23ba8b5972342e64f7865d8f1f7dae89a82b9a7bdf339\": rpc error: code = NotFound desc = could not find container \"9af996e2e901c48220f23ba8b5972342e64f7865d8f1f7dae89a82b9a7bdf339\": container with ID starting with 9af996e2e901c48220f23ba8b5972342e64f7865d8f1f7dae89a82b9a7bdf339 not found: ID does not exist" Dec 02 00:30:00 crc kubenswrapper[4856]: I1202 00:30:00.142946 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410590-8jn5q"] Dec 02 00:30:00 crc kubenswrapper[4856]: E1202 00:30:00.144804 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc00c39b-8271-4581-8a1e-9fcfe32fdac1" containerName="registry-server" Dec 02 00:30:00 crc kubenswrapper[4856]: I1202 00:30:00.144822 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc00c39b-8271-4581-8a1e-9fcfe32fdac1" 
containerName="registry-server" Dec 02 00:30:00 crc kubenswrapper[4856]: E1202 00:30:00.144838 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc00c39b-8271-4581-8a1e-9fcfe32fdac1" containerName="extract-utilities" Dec 02 00:30:00 crc kubenswrapper[4856]: I1202 00:30:00.144845 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc00c39b-8271-4581-8a1e-9fcfe32fdac1" containerName="extract-utilities" Dec 02 00:30:00 crc kubenswrapper[4856]: E1202 00:30:00.144858 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9038c84-339d-41f3-94d9-8a2dc1d0cc66" containerName="gather" Dec 02 00:30:00 crc kubenswrapper[4856]: I1202 00:30:00.144865 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9038c84-339d-41f3-94d9-8a2dc1d0cc66" containerName="gather" Dec 02 00:30:00 crc kubenswrapper[4856]: E1202 00:30:00.144887 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9038c84-339d-41f3-94d9-8a2dc1d0cc66" containerName="copy" Dec 02 00:30:00 crc kubenswrapper[4856]: I1202 00:30:00.144894 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9038c84-339d-41f3-94d9-8a2dc1d0cc66" containerName="copy" Dec 02 00:30:00 crc kubenswrapper[4856]: E1202 00:30:00.144905 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc00c39b-8271-4581-8a1e-9fcfe32fdac1" containerName="extract-content" Dec 02 00:30:00 crc kubenswrapper[4856]: I1202 00:30:00.144911 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc00c39b-8271-4581-8a1e-9fcfe32fdac1" containerName="extract-content" Dec 02 00:30:00 crc kubenswrapper[4856]: I1202 00:30:00.145059 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9038c84-339d-41f3-94d9-8a2dc1d0cc66" containerName="copy" Dec 02 00:30:00 crc kubenswrapper[4856]: I1202 00:30:00.145075 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc00c39b-8271-4581-8a1e-9fcfe32fdac1" containerName="registry-server" Dec 02 00:30:00 crc kubenswrapper[4856]: I1202 00:30:00.145091 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9038c84-339d-41f3-94d9-8a2dc1d0cc66" containerName="gather" Dec 02 00:30:00 crc kubenswrapper[4856]: I1202 00:30:00.145615 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410590-8jn5q" Dec 02 00:30:00 crc kubenswrapper[4856]: I1202 00:30:00.147967 4856 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 02 00:30:00 crc kubenswrapper[4856]: I1202 00:30:00.148405 4856 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 02 00:30:00 crc kubenswrapper[4856]: I1202 00:30:00.148630 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410590-8jn5q"] Dec 02 00:30:00 crc kubenswrapper[4856]: I1202 00:30:00.239177 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7a1e816a-99ab-407f-bc08-89dd65ee1564-secret-volume\") pod \"collect-profiles-29410590-8jn5q\" (UID: \"7a1e816a-99ab-407f-bc08-89dd65ee1564\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410590-8jn5q" Dec 02 00:30:00 crc kubenswrapper[4856]: I1202 00:30:00.239274 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wf2mv\" (UniqueName: \"kubernetes.io/projected/7a1e816a-99ab-407f-bc08-89dd65ee1564-kube-api-access-wf2mv\") pod \"collect-profiles-29410590-8jn5q\" (UID: \"7a1e816a-99ab-407f-bc08-89dd65ee1564\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410590-8jn5q" Dec 02 00:30:00 crc kubenswrapper[4856]: I1202 00:30:00.239314 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7a1e816a-99ab-407f-bc08-89dd65ee1564-config-volume\") pod \"collect-profiles-29410590-8jn5q\" (UID: \"7a1e816a-99ab-407f-bc08-89dd65ee1564\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410590-8jn5q" Dec 02 00:30:00 crc kubenswrapper[4856]: I1202 00:30:00.340924 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7a1e816a-99ab-407f-bc08-89dd65ee1564-secret-volume\") pod \"collect-profiles-29410590-8jn5q\" (UID: \"7a1e816a-99ab-407f-bc08-89dd65ee1564\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410590-8jn5q" Dec 02 00:30:00 crc kubenswrapper[4856]: I1202 00:30:00.341012 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wf2mv\" (UniqueName: \"kubernetes.io/projected/7a1e816a-99ab-407f-bc08-89dd65ee1564-kube-api-access-wf2mv\") pod \"collect-profiles-29410590-8jn5q\" (UID: \"7a1e816a-99ab-407f-bc08-89dd65ee1564\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410590-8jn5q" Dec 02 00:30:00 crc kubenswrapper[4856]: I1202 00:30:00.341053 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7a1e816a-99ab-407f-bc08-89dd65ee1564-config-volume\") pod \"collect-profiles-29410590-8jn5q\" (UID: \"7a1e816a-99ab-407f-bc08-89dd65ee1564\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410590-8jn5q" Dec 02 00:30:00 crc kubenswrapper[4856]: I1202 00:30:00.342129 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7a1e816a-99ab-407f-bc08-89dd65ee1564-config-volume\") pod 
\"collect-profiles-29410590-8jn5q\" (UID: \"7a1e816a-99ab-407f-bc08-89dd65ee1564\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410590-8jn5q" Dec 02 00:30:00 crc kubenswrapper[4856]: I1202 00:30:00.346529 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7a1e816a-99ab-407f-bc08-89dd65ee1564-secret-volume\") pod \"collect-profiles-29410590-8jn5q\" (UID: \"7a1e816a-99ab-407f-bc08-89dd65ee1564\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410590-8jn5q" Dec 02 00:30:00 crc kubenswrapper[4856]: I1202 00:30:00.357487 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wf2mv\" (UniqueName: \"kubernetes.io/projected/7a1e816a-99ab-407f-bc08-89dd65ee1564-kube-api-access-wf2mv\") pod \"collect-profiles-29410590-8jn5q\" (UID: \"7a1e816a-99ab-407f-bc08-89dd65ee1564\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410590-8jn5q" Dec 02 00:30:00 crc kubenswrapper[4856]: I1202 00:30:00.494889 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410590-8jn5q" Dec 02 00:30:00 crc kubenswrapper[4856]: I1202 00:30:00.664793 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410590-8jn5q"] Dec 02 00:30:01 crc kubenswrapper[4856]: I1202 00:30:01.573405 4856 generic.go:334] "Generic (PLEG): container finished" podID="7a1e816a-99ab-407f-bc08-89dd65ee1564" containerID="8c5fe09a4331534cfb7861c7b12b0aa9831d745a2d9ef32d075b07fe2efc5eea" exitCode=0 Dec 02 00:30:01 crc kubenswrapper[4856]: I1202 00:30:01.573451 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410590-8jn5q" event={"ID":"7a1e816a-99ab-407f-bc08-89dd65ee1564","Type":"ContainerDied","Data":"8c5fe09a4331534cfb7861c7b12b0aa9831d745a2d9ef32d075b07fe2efc5eea"} Dec 02 00:30:01 crc kubenswrapper[4856]: I1202 00:30:01.573649 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410590-8jn5q" event={"ID":"7a1e816a-99ab-407f-bc08-89dd65ee1564","Type":"ContainerStarted","Data":"4488331589d434bb6d323f3f4c3c691f20a14d057242b5b57ef853089060b3de"} Dec 02 00:30:02 crc kubenswrapper[4856]: I1202 00:30:02.811979 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410590-8jn5q" Dec 02 00:30:02 crc kubenswrapper[4856]: I1202 00:30:02.975548 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wf2mv\" (UniqueName: \"kubernetes.io/projected/7a1e816a-99ab-407f-bc08-89dd65ee1564-kube-api-access-wf2mv\") pod \"7a1e816a-99ab-407f-bc08-89dd65ee1564\" (UID: \"7a1e816a-99ab-407f-bc08-89dd65ee1564\") " Dec 02 00:30:02 crc kubenswrapper[4856]: I1202 00:30:02.976079 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7a1e816a-99ab-407f-bc08-89dd65ee1564-config-volume\") pod \"7a1e816a-99ab-407f-bc08-89dd65ee1564\" (UID: \"7a1e816a-99ab-407f-bc08-89dd65ee1564\") " Dec 02 00:30:02 crc kubenswrapper[4856]: I1202 00:30:02.976317 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7a1e816a-99ab-407f-bc08-89dd65ee1564-secret-volume\") pod \"7a1e816a-99ab-407f-bc08-89dd65ee1564\" (UID: \"7a1e816a-99ab-407f-bc08-89dd65ee1564\") " Dec 02 00:30:02 crc kubenswrapper[4856]: I1202 00:30:02.976713 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a1e816a-99ab-407f-bc08-89dd65ee1564-config-volume" (OuterVolumeSpecName: "config-volume") pod "7a1e816a-99ab-407f-bc08-89dd65ee1564" (UID: "7a1e816a-99ab-407f-bc08-89dd65ee1564"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 00:30:02 crc kubenswrapper[4856]: I1202 00:30:02.977068 4856 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7a1e816a-99ab-407f-bc08-89dd65ee1564-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 00:30:02 crc kubenswrapper[4856]: I1202 00:30:02.982914 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a1e816a-99ab-407f-bc08-89dd65ee1564-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "7a1e816a-99ab-407f-bc08-89dd65ee1564" (UID: "7a1e816a-99ab-407f-bc08-89dd65ee1564"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 00:30:02 crc kubenswrapper[4856]: I1202 00:30:02.982951 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a1e816a-99ab-407f-bc08-89dd65ee1564-kube-api-access-wf2mv" (OuterVolumeSpecName: "kube-api-access-wf2mv") pod "7a1e816a-99ab-407f-bc08-89dd65ee1564" (UID: "7a1e816a-99ab-407f-bc08-89dd65ee1564"). InnerVolumeSpecName "kube-api-access-wf2mv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:30:03 crc kubenswrapper[4856]: I1202 00:30:03.078536 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wf2mv\" (UniqueName: \"kubernetes.io/projected/7a1e816a-99ab-407f-bc08-89dd65ee1564-kube-api-access-wf2mv\") on node \"crc\" DevicePath \"\"" Dec 02 00:30:03 crc kubenswrapper[4856]: I1202 00:30:03.078570 4856 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7a1e816a-99ab-407f-bc08-89dd65ee1564-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 02 00:30:03 crc kubenswrapper[4856]: I1202 00:30:03.590861 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410590-8jn5q" event={"ID":"7a1e816a-99ab-407f-bc08-89dd65ee1564","Type":"ContainerDied","Data":"4488331589d434bb6d323f3f4c3c691f20a14d057242b5b57ef853089060b3de"} Dec 02 00:30:03 crc kubenswrapper[4856]: I1202 00:30:03.591208 4856 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4488331589d434bb6d323f3f4c3c691f20a14d057242b5b57ef853089060b3de" Dec 02 00:30:03 crc kubenswrapper[4856]: I1202 00:30:03.590895 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410590-8jn5q" Dec 02 00:30:18 crc kubenswrapper[4856]: I1202 00:30:18.572010 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vvwf6"] Dec 02 00:30:18 crc kubenswrapper[4856]: E1202 00:30:18.572741 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a1e816a-99ab-407f-bc08-89dd65ee1564" containerName="collect-profiles" Dec 02 00:30:18 crc kubenswrapper[4856]: I1202 00:30:18.572753 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a1e816a-99ab-407f-bc08-89dd65ee1564" containerName="collect-profiles" Dec 02 00:30:18 crc kubenswrapper[4856]: I1202 00:30:18.572878 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a1e816a-99ab-407f-bc08-89dd65ee1564" containerName="collect-profiles" Dec 02 00:30:18 crc kubenswrapper[4856]: I1202 00:30:18.573726 4856 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vvwf6" Dec 02 00:30:18 crc kubenswrapper[4856]: I1202 00:30:18.581385 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vvwf6"] Dec 02 00:30:18 crc kubenswrapper[4856]: I1202 00:30:18.708063 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac4b0811-817b-49af-a7e1-55e004ae1273-catalog-content\") pod \"redhat-operators-vvwf6\" (UID: \"ac4b0811-817b-49af-a7e1-55e004ae1273\") " pod="openshift-marketplace/redhat-operators-vvwf6" Dec 02 00:30:18 crc kubenswrapper[4856]: I1202 00:30:18.708152 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac4b0811-817b-49af-a7e1-55e004ae1273-utilities\") pod \"redhat-operators-vvwf6\" (UID: \"ac4b0811-817b-49af-a7e1-55e004ae1273\") " pod="openshift-marketplace/redhat-operators-vvwf6" Dec 02 00:30:18 crc kubenswrapper[4856]: I1202 00:30:18.708185 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnggw\" (UniqueName: \"kubernetes.io/projected/ac4b0811-817b-49af-a7e1-55e004ae1273-kube-api-access-rnggw\") pod \"redhat-operators-vvwf6\" (UID: \"ac4b0811-817b-49af-a7e1-55e004ae1273\") " pod="openshift-marketplace/redhat-operators-vvwf6" Dec 02 00:30:18 crc kubenswrapper[4856]: I1202 00:30:18.809573 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac4b0811-817b-49af-a7e1-55e004ae1273-catalog-content\") pod \"redhat-operators-vvwf6\" (UID: \"ac4b0811-817b-49af-a7e1-55e004ae1273\") " pod="openshift-marketplace/redhat-operators-vvwf6" Dec 02 00:30:18 crc kubenswrapper[4856]: I1202 00:30:18.809711 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac4b0811-817b-49af-a7e1-55e004ae1273-utilities\") pod \"redhat-operators-vvwf6\" (UID: \"ac4b0811-817b-49af-a7e1-55e004ae1273\") " pod="openshift-marketplace/redhat-operators-vvwf6" Dec 02 00:30:18 crc kubenswrapper[4856]: I1202 00:30:18.809747 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnggw\" (UniqueName: \"kubernetes.io/projected/ac4b0811-817b-49af-a7e1-55e004ae1273-kube-api-access-rnggw\") pod \"redhat-operators-vvwf6\" (UID: \"ac4b0811-817b-49af-a7e1-55e004ae1273\") " pod="openshift-marketplace/redhat-operators-vvwf6" Dec 02 00:30:18 crc kubenswrapper[4856]: I1202 00:30:18.810135 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac4b0811-817b-49af-a7e1-55e004ae1273-catalog-content\") pod \"redhat-operators-vvwf6\" (UID: \"ac4b0811-817b-49af-a7e1-55e004ae1273\") " pod="openshift-marketplace/redhat-operators-vvwf6" Dec 02 00:30:18 crc kubenswrapper[4856]: I1202 00:30:18.810162 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac4b0811-817b-49af-a7e1-55e004ae1273-utilities\") pod \"redhat-operators-vvwf6\" (UID: \"ac4b0811-817b-49af-a7e1-55e004ae1273\") " pod="openshift-marketplace/redhat-operators-vvwf6" Dec 02 00:30:18 crc kubenswrapper[4856]: I1202 00:30:18.835832 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-rnggw\" (UniqueName: \"kubernetes.io/projected/ac4b0811-817b-49af-a7e1-55e004ae1273-kube-api-access-rnggw\") pod \"redhat-operators-vvwf6\" (UID: \"ac4b0811-817b-49af-a7e1-55e004ae1273\") " pod="openshift-marketplace/redhat-operators-vvwf6" Dec 02 00:30:18 crc kubenswrapper[4856]: I1202 00:30:18.891749 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vvwf6" Dec 02 00:30:19 crc kubenswrapper[4856]: I1202 00:30:19.096936 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vvwf6"] Dec 02 00:30:19 crc kubenswrapper[4856]: I1202 00:30:19.719316 4856 generic.go:334] "Generic (PLEG): container finished" podID="ac4b0811-817b-49af-a7e1-55e004ae1273" containerID="016467ec3ac3d1d1a74360fa889b69bf5c39c6de0fa23294bf28f1777d5f14f6" exitCode=0 Dec 02 00:30:19 crc kubenswrapper[4856]: I1202 00:30:19.719364 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vvwf6" event={"ID":"ac4b0811-817b-49af-a7e1-55e004ae1273","Type":"ContainerDied","Data":"016467ec3ac3d1d1a74360fa889b69bf5c39c6de0fa23294bf28f1777d5f14f6"} Dec 02 00:30:19 crc kubenswrapper[4856]: I1202 00:30:19.719392 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vvwf6" event={"ID":"ac4b0811-817b-49af-a7e1-55e004ae1273","Type":"ContainerStarted","Data":"3281c9c07a40ba6ae4505ae54321640cc0a1923408b2cd6301a31764952aa4c3"} Dec 02 00:30:21 crc kubenswrapper[4856]: I1202 00:30:21.733653 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vvwf6" event={"ID":"ac4b0811-817b-49af-a7e1-55e004ae1273","Type":"ContainerStarted","Data":"22c8293a22e9ab2fd98aa7884c8dc71b3ff608bbb86aa024740dab9aa5f29d8b"} Dec 02 00:30:22 crc kubenswrapper[4856]: I1202 00:30:22.741010 4856 generic.go:334] "Generic (PLEG): container finished" podID="ac4b0811-817b-49af-a7e1-55e004ae1273" containerID="22c8293a22e9ab2fd98aa7884c8dc71b3ff608bbb86aa024740dab9aa5f29d8b" exitCode=0 Dec 02 00:30:22 crc kubenswrapper[4856]: I1202 00:30:22.741051 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vvwf6" event={"ID":"ac4b0811-817b-49af-a7e1-55e004ae1273","Type":"ContainerDied","Data":"22c8293a22e9ab2fd98aa7884c8dc71b3ff608bbb86aa024740dab9aa5f29d8b"} Dec 02 00:30:24 crc kubenswrapper[4856]: I1202 00:30:24.756650 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vvwf6" event={"ID":"ac4b0811-817b-49af-a7e1-55e004ae1273","Type":"ContainerStarted","Data":"7a83ff07806759e72e3f6eea43d6ea88c0141ddfa82b65f21234a2dcc84967e4"} Dec 02 00:30:24 crc kubenswrapper[4856]: I1202 00:30:24.784367 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vvwf6" podStartSLOduration=2.533870787 podStartE2EDuration="6.78433702s" podCreationTimestamp="2025-12-02 00:30:18 +0000 UTC" firstStartedPulling="2025-12-02 00:30:19.720572656 +0000 UTC m=+1446.746940660" lastFinishedPulling="2025-12-02 00:30:23.971038889 +0000 UTC m=+1450.997406893" observedRunningTime="2025-12-02 00:30:24.781025059 +0000 UTC m=+1451.807393103" watchObservedRunningTime="2025-12-02 00:30:24.78433702 +0000 UTC m=+1451.810705054" Dec 02 00:30:28 crc kubenswrapper[4856]: I1202 00:30:28.892062 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vvwf6" Dec 02 
00:30:28 crc kubenswrapper[4856]: I1202 00:30:28.892343 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vvwf6" Dec 02 00:30:29 crc kubenswrapper[4856]: I1202 00:30:29.930542 4856 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-vvwf6" podUID="ac4b0811-817b-49af-a7e1-55e004ae1273" containerName="registry-server" probeResult="failure" output=< Dec 02 00:30:29 crc kubenswrapper[4856]: timeout: failed to connect service ":50051" within 1s Dec 02 00:30:29 crc kubenswrapper[4856]: > Dec 02 00:30:38 crc kubenswrapper[4856]: I1202 00:30:38.934251 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vvwf6" Dec 02 00:30:38 crc kubenswrapper[4856]: I1202 00:30:38.983176 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vvwf6" Dec 02 00:30:39 crc kubenswrapper[4856]: I1202 00:30:39.194266 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vvwf6"] Dec 02 00:30:40 crc kubenswrapper[4856]: I1202 00:30:40.877734 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vvwf6" podUID="ac4b0811-817b-49af-a7e1-55e004ae1273" containerName="registry-server" containerID="cri-o://7a83ff07806759e72e3f6eea43d6ea88c0141ddfa82b65f21234a2dcc84967e4" gracePeriod=2 Dec 02 00:30:42 crc kubenswrapper[4856]: I1202 00:30:42.893231 4856 generic.go:334] "Generic (PLEG): container finished" podID="ac4b0811-817b-49af-a7e1-55e004ae1273" containerID="7a83ff07806759e72e3f6eea43d6ea88c0141ddfa82b65f21234a2dcc84967e4" exitCode=0 Dec 02 00:30:42 crc kubenswrapper[4856]: I1202 00:30:42.893897 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vvwf6" event={"ID":"ac4b0811-817b-49af-a7e1-55e004ae1273","Type":"ContainerDied","Data":"7a83ff07806759e72e3f6eea43d6ea88c0141ddfa82b65f21234a2dcc84967e4"} Dec 02 00:30:43 crc kubenswrapper[4856]: I1202 00:30:43.109706 4856 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vvwf6" Dec 02 00:30:43 crc kubenswrapper[4856]: I1202 00:30:43.277156 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnggw\" (UniqueName: \"kubernetes.io/projected/ac4b0811-817b-49af-a7e1-55e004ae1273-kube-api-access-rnggw\") pod \"ac4b0811-817b-49af-a7e1-55e004ae1273\" (UID: \"ac4b0811-817b-49af-a7e1-55e004ae1273\") " Dec 02 00:30:43 crc kubenswrapper[4856]: I1202 00:30:43.277192 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac4b0811-817b-49af-a7e1-55e004ae1273-catalog-content\") pod \"ac4b0811-817b-49af-a7e1-55e004ae1273\" (UID: \"ac4b0811-817b-49af-a7e1-55e004ae1273\") " Dec 02 00:30:43 crc kubenswrapper[4856]: I1202 00:30:43.277298 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac4b0811-817b-49af-a7e1-55e004ae1273-utilities\") pod \"ac4b0811-817b-49af-a7e1-55e004ae1273\" (UID: \"ac4b0811-817b-49af-a7e1-55e004ae1273\") " Dec 02 00:30:43 crc kubenswrapper[4856]: I1202 00:30:43.278359 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac4b0811-817b-49af-a7e1-55e004ae1273-utilities" (OuterVolumeSpecName: "utilities") pod "ac4b0811-817b-49af-a7e1-55e004ae1273" (UID: "ac4b0811-817b-49af-a7e1-55e004ae1273"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:30:43 crc kubenswrapper[4856]: I1202 00:30:43.285397 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac4b0811-817b-49af-a7e1-55e004ae1273-kube-api-access-rnggw" (OuterVolumeSpecName: "kube-api-access-rnggw") pod "ac4b0811-817b-49af-a7e1-55e004ae1273" (UID: "ac4b0811-817b-49af-a7e1-55e004ae1273"). InnerVolumeSpecName "kube-api-access-rnggw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 00:30:43 crc kubenswrapper[4856]: I1202 00:30:43.378697 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac4b0811-817b-49af-a7e1-55e004ae1273-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ac4b0811-817b-49af-a7e1-55e004ae1273" (UID: "ac4b0811-817b-49af-a7e1-55e004ae1273"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:30:43 crc kubenswrapper[4856]: I1202 00:30:43.378802 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac4b0811-817b-49af-a7e1-55e004ae1273-catalog-content\") pod \"ac4b0811-817b-49af-a7e1-55e004ae1273\" (UID: \"ac4b0811-817b-49af-a7e1-55e004ae1273\") " Dec 02 00:30:43 crc kubenswrapper[4856]: W1202 00:30:43.378936 4856 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/ac4b0811-817b-49af-a7e1-55e004ae1273/volumes/kubernetes.io~empty-dir/catalog-content Dec 02 00:30:43 crc kubenswrapper[4856]: I1202 00:30:43.378949 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac4b0811-817b-49af-a7e1-55e004ae1273-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ac4b0811-817b-49af-a7e1-55e004ae1273" (UID: "ac4b0811-817b-49af-a7e1-55e004ae1273"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 00:30:43 crc kubenswrapper[4856]: I1202 00:30:43.379782 4856 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac4b0811-817b-49af-a7e1-55e004ae1273-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 00:30:43 crc kubenswrapper[4856]: I1202 00:30:43.380206 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnggw\" (UniqueName: \"kubernetes.io/projected/ac4b0811-817b-49af-a7e1-55e004ae1273-kube-api-access-rnggw\") on node \"crc\" DevicePath \"\"" Dec 02 00:30:43 crc kubenswrapper[4856]: I1202 00:30:43.380264 4856 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac4b0811-817b-49af-a7e1-55e004ae1273-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 00:30:43 crc kubenswrapper[4856]: I1202 00:30:43.902649 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vvwf6" event={"ID":"ac4b0811-817b-49af-a7e1-55e004ae1273","Type":"ContainerDied","Data":"3281c9c07a40ba6ae4505ae54321640cc0a1923408b2cd6301a31764952aa4c3"} Dec 02 00:30:43 crc kubenswrapper[4856]: I1202 00:30:43.902743 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vvwf6" Dec 02 00:30:43 crc kubenswrapper[4856]: I1202 00:30:43.903074 4856 scope.go:117] "RemoveContainer" containerID="7a83ff07806759e72e3f6eea43d6ea88c0141ddfa82b65f21234a2dcc84967e4" Dec 02 00:30:43 crc kubenswrapper[4856]: I1202 00:30:43.934500 4856 scope.go:117] "RemoveContainer" containerID="22c8293a22e9ab2fd98aa7884c8dc71b3ff608bbb86aa024740dab9aa5f29d8b" Dec 02 00:30:43 crc kubenswrapper[4856]: I1202 00:30:43.936273 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vvwf6"] Dec 02 00:30:43 crc kubenswrapper[4856]: I1202 00:30:43.942005 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vvwf6"] Dec 02 00:30:43 crc kubenswrapper[4856]: I1202 00:30:43.956001 4856 scope.go:117] "RemoveContainer" containerID="016467ec3ac3d1d1a74360fa889b69bf5c39c6de0fa23294bf28f1777d5f14f6" Dec 02 00:30:45 crc kubenswrapper[4856]: I1202 00:30:45.263740 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac4b0811-817b-49af-a7e1-55e004ae1273" path="/var/lib/kubelet/pods/ac4b0811-817b-49af-a7e1-55e004ae1273/volumes" Dec 02 00:31:05 crc kubenswrapper[4856]: I1202 00:31:05.061833 4856 patch_prober.go:28] interesting pod/machine-config-daemon-455ww container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 00:31:05 crc kubenswrapper[4856]: I1202 00:31:05.062375 4856 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 00:31:35 crc kubenswrapper[4856]: I1202 00:31:35.062274 4856 patch_prober.go:28] interesting pod/machine-config-daemon-455ww container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 00:31:35 crc kubenswrapper[4856]: I1202 00:31:35.062744 4856 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 00:32:05 crc kubenswrapper[4856]: I1202 00:32:05.061348 4856 patch_prober.go:28] interesting pod/machine-config-daemon-455ww container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 00:32:05 crc kubenswrapper[4856]: I1202 00:32:05.062122 4856 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 00:32:05 crc kubenswrapper[4856]: I1202 00:32:05.062173 4856 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-455ww" Dec 02 00:32:05 crc kubenswrapper[4856]: I1202 00:32:05.062993 4856 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"157d719dc098f8c216b760d10c270d947a725276a55e09b4f55e821566cfc9ee"} pod="openshift-machine-config-operator/machine-config-daemon-455ww" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 00:32:05 crc kubenswrapper[4856]: I1202 00:32:05.063067 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerName="machine-config-daemon" containerID="cri-o://157d719dc098f8c216b760d10c270d947a725276a55e09b4f55e821566cfc9ee" gracePeriod=600 Dec 02 00:32:05 crc kubenswrapper[4856]: E1202 00:32:05.189305 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-455ww_openshift-machine-config-operator(0271f00d-b420-4dee-aa8b-92d6fc294b2a)\"" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" Dec 02 00:32:05 crc kubenswrapper[4856]: I1202 00:32:05.554457 4856 generic.go:334] "Generic (PLEG): container finished" podID="0271f00d-b420-4dee-aa8b-92d6fc294b2a" containerID="157d719dc098f8c216b760d10c270d947a725276a55e09b4f55e821566cfc9ee" exitCode=0 Dec 02 00:32:05 crc kubenswrapper[4856]: I1202 00:32:05.554499 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-455ww" event={"ID":"0271f00d-b420-4dee-aa8b-92d6fc294b2a","Type":"ContainerDied","Data":"157d719dc098f8c216b760d10c270d947a725276a55e09b4f55e821566cfc9ee"} Dec 02 00:32:05 crc kubenswrapper[4856]: I1202 00:32:05.554540 4856 scope.go:117] "RemoveContainer" containerID="fb7ff0312f1383e8fbf6b0241feb022dd38d0b629331ff47c6869482a6ef16cb" Dec 02 00:32:05 crc kubenswrapper[4856]: I1202 
00:32:05.555040 4856 scope.go:117] "RemoveContainer" containerID="157d719dc098f8c216b760d10c270d947a725276a55e09b4f55e821566cfc9ee"
Dec 02 00:32:05 crc kubenswrapper[4856]: E1202 00:32:05.555299 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-455ww_openshift-machine-config-operator(0271f00d-b420-4dee-aa8b-92d6fc294b2a)\"" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a"
Dec 02 00:32:12 crc kubenswrapper[4856]: I1202 00:32:12.147746 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/infrawatch-operators-4g4f5"]
Dec 02 00:32:12 crc kubenswrapper[4856]: E1202 00:32:12.149082 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac4b0811-817b-49af-a7e1-55e004ae1273" containerName="extract-utilities"
Dec 02 00:32:12 crc kubenswrapper[4856]: I1202 00:32:12.149115 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac4b0811-817b-49af-a7e1-55e004ae1273" containerName="extract-utilities"
Dec 02 00:32:12 crc kubenswrapper[4856]: E1202 00:32:12.149142 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac4b0811-817b-49af-a7e1-55e004ae1273" containerName="registry-server"
Dec 02 00:32:12 crc kubenswrapper[4856]: I1202 00:32:12.149158 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac4b0811-817b-49af-a7e1-55e004ae1273" containerName="registry-server"
Dec 02 00:32:12 crc kubenswrapper[4856]: E1202 00:32:12.149202 4856 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac4b0811-817b-49af-a7e1-55e004ae1273" containerName="extract-content"
Dec 02 00:32:12 crc kubenswrapper[4856]: I1202 00:32:12.149219 4856 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac4b0811-817b-49af-a7e1-55e004ae1273" containerName="extract-content"
Dec 02 00:32:12 crc kubenswrapper[4856]: I1202 00:32:12.149534 4856 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac4b0811-817b-49af-a7e1-55e004ae1273" containerName="registry-server"
Dec 02 00:32:12 crc kubenswrapper[4856]: I1202 00:32:12.150368 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-4g4f5"
Dec 02 00:32:12 crc kubenswrapper[4856]: I1202 00:32:12.169194 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-4g4f5"]
Dec 02 00:32:12 crc kubenswrapper[4856]: I1202 00:32:12.289315 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzqmx\" (UniqueName: \"kubernetes.io/projected/631ac5d8-6f46-4b2f-8140-5eed739c637d-kube-api-access-jzqmx\") pod \"infrawatch-operators-4g4f5\" (UID: \"631ac5d8-6f46-4b2f-8140-5eed739c637d\") " pod="service-telemetry/infrawatch-operators-4g4f5"
Dec 02 00:32:12 crc kubenswrapper[4856]: I1202 00:32:12.391550 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzqmx\" (UniqueName: \"kubernetes.io/projected/631ac5d8-6f46-4b2f-8140-5eed739c637d-kube-api-access-jzqmx\") pod \"infrawatch-operators-4g4f5\" (UID: \"631ac5d8-6f46-4b2f-8140-5eed739c637d\") " pod="service-telemetry/infrawatch-operators-4g4f5"
Dec 02 00:32:12 crc kubenswrapper[4856]: I1202 00:32:12.416338 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzqmx\" (UniqueName: \"kubernetes.io/projected/631ac5d8-6f46-4b2f-8140-5eed739c637d-kube-api-access-jzqmx\") pod \"infrawatch-operators-4g4f5\" (UID: \"631ac5d8-6f46-4b2f-8140-5eed739c637d\") " pod="service-telemetry/infrawatch-operators-4g4f5"
Dec 02 00:32:12 crc kubenswrapper[4856]: I1202 00:32:12.515953 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-4g4f5"
Dec 02 00:32:13 crc kubenswrapper[4856]: I1202 00:32:13.008869 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-4g4f5"]
Dec 02 00:32:13 crc kubenswrapper[4856]: W1202 00:32:13.019231 4856 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod631ac5d8_6f46_4b2f_8140_5eed739c637d.slice/crio-977c321f9be67dde55a03ed08f591f2895e91d0506f2e35b8b8d4931eff2bec1 WatchSource:0}: Error finding container 977c321f9be67dde55a03ed08f591f2895e91d0506f2e35b8b8d4931eff2bec1: Status 404 returned error can't find the container with id 977c321f9be67dde55a03ed08f591f2895e91d0506f2e35b8b8d4931eff2bec1
Dec 02 00:32:13 crc kubenswrapper[4856]: I1202 00:32:13.643942 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-4g4f5" event={"ID":"631ac5d8-6f46-4b2f-8140-5eed739c637d","Type":"ContainerStarted","Data":"d218828b0c4487d1825234f962133b8d0fbe1886321dc48f0cab3279acc19eb3"}
Dec 02 00:32:13 crc kubenswrapper[4856]: I1202 00:32:13.644387 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-4g4f5" event={"ID":"631ac5d8-6f46-4b2f-8140-5eed739c637d","Type":"ContainerStarted","Data":"977c321f9be67dde55a03ed08f591f2895e91d0506f2e35b8b8d4931eff2bec1"}
Dec 02 00:32:13 crc kubenswrapper[4856]: I1202 00:32:13.675959 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/infrawatch-operators-4g4f5" podStartSLOduration=1.565291168 podStartE2EDuration="1.675942728s" podCreationTimestamp="2025-12-02 00:32:12 +0000 UTC" firstStartedPulling="2025-12-02 00:32:13.022669851 +0000 UTC m=+1560.049037855" lastFinishedPulling="2025-12-02 00:32:13.133321411 +0000 UTC m=+1560.159689415" observedRunningTime="2025-12-02 00:32:13.673364444 +0000 UTC m=+1560.699732458" watchObservedRunningTime="2025-12-02 00:32:13.675942728 +0000 UTC m=+1560.702310742"
Dec 02 00:32:18 crc kubenswrapper[4856]: I1202 00:32:18.919789 4856 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-z5ngb"]
Dec 02 00:32:18 crc kubenswrapper[4856]: I1202 00:32:18.922091 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z5ngb"
Dec 02 00:32:18 crc kubenswrapper[4856]: I1202 00:32:18.928042 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-z5ngb"]
Dec 02 00:32:19 crc kubenswrapper[4856]: I1202 00:32:19.093558 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5aa08166-3116-4ac1-b4f3-4632e545182f-utilities\") pod \"community-operators-z5ngb\" (UID: \"5aa08166-3116-4ac1-b4f3-4632e545182f\") " pod="openshift-marketplace/community-operators-z5ngb"
Dec 02 00:32:19 crc kubenswrapper[4856]: I1202 00:32:19.093723 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5aa08166-3116-4ac1-b4f3-4632e545182f-catalog-content\") pod \"community-operators-z5ngb\" (UID: \"5aa08166-3116-4ac1-b4f3-4632e545182f\") " pod="openshift-marketplace/community-operators-z5ngb"
Dec 02 00:32:19 crc kubenswrapper[4856]: I1202 00:32:19.093833 4856 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhzmr\" (UniqueName: \"kubernetes.io/projected/5aa08166-3116-4ac1-b4f3-4632e545182f-kube-api-access-mhzmr\") pod \"community-operators-z5ngb\" (UID: \"5aa08166-3116-4ac1-b4f3-4632e545182f\") " pod="openshift-marketplace/community-operators-z5ngb"
Dec 02 00:32:19 crc kubenswrapper[4856]: I1202 00:32:19.195057 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5aa08166-3116-4ac1-b4f3-4632e545182f-catalog-content\") pod \"community-operators-z5ngb\" (UID: \"5aa08166-3116-4ac1-b4f3-4632e545182f\") " pod="openshift-marketplace/community-operators-z5ngb"
Dec 02 00:32:19 crc kubenswrapper[4856]: I1202 00:32:19.195147 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhzmr\" (UniqueName: \"kubernetes.io/projected/5aa08166-3116-4ac1-b4f3-4632e545182f-kube-api-access-mhzmr\") pod \"community-operators-z5ngb\" (UID: \"5aa08166-3116-4ac1-b4f3-4632e545182f\") " pod="openshift-marketplace/community-operators-z5ngb"
Dec 02 00:32:19 crc kubenswrapper[4856]: I1202 00:32:19.195210 4856 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5aa08166-3116-4ac1-b4f3-4632e545182f-utilities\") pod \"community-operators-z5ngb\" (UID: \"5aa08166-3116-4ac1-b4f3-4632e545182f\") " pod="openshift-marketplace/community-operators-z5ngb"
Dec 02 00:32:19 crc kubenswrapper[4856]: I1202 00:32:19.196159 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5aa08166-3116-4ac1-b4f3-4632e545182f-utilities\") pod \"community-operators-z5ngb\" (UID: \"5aa08166-3116-4ac1-b4f3-4632e545182f\") " pod="openshift-marketplace/community-operators-z5ngb"
Dec 02 00:32:19 crc kubenswrapper[4856]: I1202 00:32:19.196197 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5aa08166-3116-4ac1-b4f3-4632e545182f-catalog-content\") pod \"community-operators-z5ngb\" (UID: \"5aa08166-3116-4ac1-b4f3-4632e545182f\") " pod="openshift-marketplace/community-operators-z5ngb"
Dec 02 00:32:19 crc kubenswrapper[4856]: I1202 00:32:19.216268 4856 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhzmr\" (UniqueName: \"kubernetes.io/projected/5aa08166-3116-4ac1-b4f3-4632e545182f-kube-api-access-mhzmr\") pod \"community-operators-z5ngb\" (UID: \"5aa08166-3116-4ac1-b4f3-4632e545182f\") " pod="openshift-marketplace/community-operators-z5ngb"
Dec 02 00:32:19 crc kubenswrapper[4856]: I1202 00:32:19.247832 4856 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z5ngb"
Dec 02 00:32:19 crc kubenswrapper[4856]: I1202 00:32:19.670324 4856 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-z5ngb"]
Dec 02 00:32:19 crc kubenswrapper[4856]: I1202 00:32:19.689159 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z5ngb" event={"ID":"5aa08166-3116-4ac1-b4f3-4632e545182f","Type":"ContainerStarted","Data":"95ce1689dfb619d47d831a06e92b8756332f4709a6b84223005f4b4050ce1866"}
Dec 02 00:32:20 crc kubenswrapper[4856]: I1202 00:32:20.251753 4856 scope.go:117] "RemoveContainer" containerID="157d719dc098f8c216b760d10c270d947a725276a55e09b4f55e821566cfc9ee"
Dec 02 00:32:20 crc kubenswrapper[4856]: E1202 00:32:20.251996 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-455ww_openshift-machine-config-operator(0271f00d-b420-4dee-aa8b-92d6fc294b2a)\"" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a"
Dec 02 00:32:20 crc kubenswrapper[4856]: I1202 00:32:20.701086 4856 generic.go:334] "Generic (PLEG): container finished" podID="5aa08166-3116-4ac1-b4f3-4632e545182f" containerID="c17c87a6510056742f8f63f247fa5d016b52370c7ea5de77c031988be7de5983" exitCode=0
Dec 02 00:32:20 crc kubenswrapper[4856]: I1202 00:32:20.701151 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z5ngb" event={"ID":"5aa08166-3116-4ac1-b4f3-4632e545182f","Type":"ContainerDied","Data":"c17c87a6510056742f8f63f247fa5d016b52370c7ea5de77c031988be7de5983"}
Dec 02 00:32:21 crc kubenswrapper[4856]: I1202 00:32:21.710865 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z5ngb" event={"ID":"5aa08166-3116-4ac1-b4f3-4632e545182f","Type":"ContainerStarted","Data":"6a063d734d7eed192658cedb6c85f3c11a52fbfe477bbbb1b338e20eb7ed18d7"}
Dec 02 00:32:22 crc kubenswrapper[4856]: I1202 00:32:22.517245 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/infrawatch-operators-4g4f5"
Dec 02 00:32:22 crc kubenswrapper[4856]: I1202 00:32:22.517704 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/infrawatch-operators-4g4f5"
Dec 02 00:32:22 crc kubenswrapper[4856]: I1202 00:32:22.556211 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/infrawatch-operators-4g4f5"
Dec 02 00:32:22 crc kubenswrapper[4856]: I1202 00:32:22.718313 4856 generic.go:334] "Generic (PLEG): container finished" podID="5aa08166-3116-4ac1-b4f3-4632e545182f" containerID="6a063d734d7eed192658cedb6c85f3c11a52fbfe477bbbb1b338e20eb7ed18d7" exitCode=0
Dec 02 00:32:22 crc kubenswrapper[4856]: I1202 00:32:22.718415 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z5ngb" event={"ID":"5aa08166-3116-4ac1-b4f3-4632e545182f","Type":"ContainerDied","Data":"6a063d734d7eed192658cedb6c85f3c11a52fbfe477bbbb1b338e20eb7ed18d7"}
Dec 02 00:32:22 crc kubenswrapper[4856]: I1202 00:32:22.747119 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/infrawatch-operators-4g4f5"
Dec 02 00:32:24 crc kubenswrapper[4856]: I1202 00:32:24.757194 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z5ngb" event={"ID":"5aa08166-3116-4ac1-b4f3-4632e545182f","Type":"ContainerStarted","Data":"bd7c11b8ff8f561bfc47be86e253be1811ce13b58af4edb79cdf07ec84b575c6"}
Dec 02 00:32:24 crc kubenswrapper[4856]: I1202 00:32:24.904114 4856 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-z5ngb" podStartSLOduration=3.52915104 podStartE2EDuration="6.904095241s" podCreationTimestamp="2025-12-02 00:32:18 +0000 UTC" firstStartedPulling="2025-12-02 00:32:20.703095386 +0000 UTC m=+1567.729463390" lastFinishedPulling="2025-12-02 00:32:24.078039567 +0000 UTC m=+1571.104407591" observedRunningTime="2025-12-02 00:32:24.779367825 +0000 UTC m=+1571.805735869" watchObservedRunningTime="2025-12-02 00:32:24.904095241 +0000 UTC m=+1571.930463245"
Dec 02 00:32:24 crc kubenswrapper[4856]: I1202 00:32:24.909583 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-4g4f5"]
Dec 02 00:32:24 crc kubenswrapper[4856]: I1202 00:32:24.909799 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/infrawatch-operators-4g4f5" podUID="631ac5d8-6f46-4b2f-8140-5eed739c637d" containerName="registry-server" containerID="cri-o://d218828b0c4487d1825234f962133b8d0fbe1886321dc48f0cab3279acc19eb3" gracePeriod=2
Dec 02 00:32:25 crc kubenswrapper[4856]: I1202 00:32:25.354468 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-4g4f5"
Dec 02 00:32:25 crc kubenswrapper[4856]: I1202 00:32:25.492199 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jzqmx\" (UniqueName: \"kubernetes.io/projected/631ac5d8-6f46-4b2f-8140-5eed739c637d-kube-api-access-jzqmx\") pod \"631ac5d8-6f46-4b2f-8140-5eed739c637d\" (UID: \"631ac5d8-6f46-4b2f-8140-5eed739c637d\") "
Dec 02 00:32:25 crc kubenswrapper[4856]: I1202 00:32:25.497551 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/631ac5d8-6f46-4b2f-8140-5eed739c637d-kube-api-access-jzqmx" (OuterVolumeSpecName: "kube-api-access-jzqmx") pod "631ac5d8-6f46-4b2f-8140-5eed739c637d" (UID: "631ac5d8-6f46-4b2f-8140-5eed739c637d"). InnerVolumeSpecName "kube-api-access-jzqmx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 00:32:25 crc kubenswrapper[4856]: I1202 00:32:25.593742 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jzqmx\" (UniqueName: \"kubernetes.io/projected/631ac5d8-6f46-4b2f-8140-5eed739c637d-kube-api-access-jzqmx\") on node \"crc\" DevicePath \"\""
Dec 02 00:32:25 crc kubenswrapper[4856]: I1202 00:32:25.766451 4856 generic.go:334] "Generic (PLEG): container finished" podID="631ac5d8-6f46-4b2f-8140-5eed739c637d" containerID="d218828b0c4487d1825234f962133b8d0fbe1886321dc48f0cab3279acc19eb3" exitCode=0
Dec 02 00:32:25 crc kubenswrapper[4856]: I1202 00:32:25.766545 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-4g4f5"
Dec 02 00:32:25 crc kubenswrapper[4856]: I1202 00:32:25.766547 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-4g4f5" event={"ID":"631ac5d8-6f46-4b2f-8140-5eed739c637d","Type":"ContainerDied","Data":"d218828b0c4487d1825234f962133b8d0fbe1886321dc48f0cab3279acc19eb3"}
Dec 02 00:32:25 crc kubenswrapper[4856]: I1202 00:32:25.766719 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-4g4f5" event={"ID":"631ac5d8-6f46-4b2f-8140-5eed739c637d","Type":"ContainerDied","Data":"977c321f9be67dde55a03ed08f591f2895e91d0506f2e35b8b8d4931eff2bec1"}
Dec 02 00:32:25 crc kubenswrapper[4856]: I1202 00:32:25.766752 4856 scope.go:117] "RemoveContainer" containerID="d218828b0c4487d1825234f962133b8d0fbe1886321dc48f0cab3279acc19eb3"
Dec 02 00:32:25 crc kubenswrapper[4856]: I1202 00:32:25.806037 4856 scope.go:117] "RemoveContainer" containerID="d218828b0c4487d1825234f962133b8d0fbe1886321dc48f0cab3279acc19eb3"
Dec 02 00:32:25 crc kubenswrapper[4856]: E1202 00:32:25.806498 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d218828b0c4487d1825234f962133b8d0fbe1886321dc48f0cab3279acc19eb3\": container with ID starting with d218828b0c4487d1825234f962133b8d0fbe1886321dc48f0cab3279acc19eb3 not found: ID does not exist" containerID="d218828b0c4487d1825234f962133b8d0fbe1886321dc48f0cab3279acc19eb3"
Dec 02 00:32:25 crc kubenswrapper[4856]: I1202 00:32:25.806535 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d218828b0c4487d1825234f962133b8d0fbe1886321dc48f0cab3279acc19eb3"} err="failed to get container status \"d218828b0c4487d1825234f962133b8d0fbe1886321dc48f0cab3279acc19eb3\": rpc error: code = NotFound desc = could not find container \"d218828b0c4487d1825234f962133b8d0fbe1886321dc48f0cab3279acc19eb3\": container with ID starting with d218828b0c4487d1825234f962133b8d0fbe1886321dc48f0cab3279acc19eb3 not found: ID does not exist"
Dec 02 00:32:25 crc kubenswrapper[4856]: I1202 00:32:25.808344 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-4g4f5"]
Dec 02 00:32:25 crc kubenswrapper[4856]: I1202 00:32:25.815257 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/infrawatch-operators-4g4f5"]
Dec 02 00:32:27 crc kubenswrapper[4856]: I1202 00:32:27.262277 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="631ac5d8-6f46-4b2f-8140-5eed739c637d" path="/var/lib/kubelet/pods/631ac5d8-6f46-4b2f-8140-5eed739c637d/volumes"
Dec 02 00:32:29 crc kubenswrapper[4856]: I1202 00:32:29.248155 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-z5ngb"
Dec 02 00:32:29 crc kubenswrapper[4856]: I1202 00:32:29.248220 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-z5ngb"
Dec 02 00:32:29 crc kubenswrapper[4856]: I1202 00:32:29.297700 4856 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-z5ngb"
Dec 02 00:32:29 crc kubenswrapper[4856]: I1202 00:32:29.853934 4856 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-z5ngb"
Dec 02 00:32:30 crc kubenswrapper[4856]: I1202 00:32:30.309355 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-z5ngb"]
Dec 02 00:32:31 crc kubenswrapper[4856]: I1202 00:32:31.814170 4856 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-z5ngb" podUID="5aa08166-3116-4ac1-b4f3-4632e545182f" containerName="registry-server" containerID="cri-o://bd7c11b8ff8f561bfc47be86e253be1811ce13b58af4edb79cdf07ec84b575c6" gracePeriod=2
Dec 02 00:32:32 crc kubenswrapper[4856]: I1202 00:32:32.253054 4856 scope.go:117] "RemoveContainer" containerID="157d719dc098f8c216b760d10c270d947a725276a55e09b4f55e821566cfc9ee"
Dec 02 00:32:32 crc kubenswrapper[4856]: E1202 00:32:32.253429 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-455ww_openshift-machine-config-operator(0271f00d-b420-4dee-aa8b-92d6fc294b2a)\"" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a"
Dec 02 00:32:32 crc kubenswrapper[4856]: I1202 00:32:32.672456 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z5ngb"
Dec 02 00:32:32 crc kubenswrapper[4856]: I1202 00:32:32.796111 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5aa08166-3116-4ac1-b4f3-4632e545182f-utilities\") pod \"5aa08166-3116-4ac1-b4f3-4632e545182f\" (UID: \"5aa08166-3116-4ac1-b4f3-4632e545182f\") "
Dec 02 00:32:32 crc kubenswrapper[4856]: I1202 00:32:32.796203 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mhzmr\" (UniqueName: \"kubernetes.io/projected/5aa08166-3116-4ac1-b4f3-4632e545182f-kube-api-access-mhzmr\") pod \"5aa08166-3116-4ac1-b4f3-4632e545182f\" (UID: \"5aa08166-3116-4ac1-b4f3-4632e545182f\") "
Dec 02 00:32:32 crc kubenswrapper[4856]: I1202 00:32:32.796256 4856 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5aa08166-3116-4ac1-b4f3-4632e545182f-catalog-content\") pod \"5aa08166-3116-4ac1-b4f3-4632e545182f\" (UID: \"5aa08166-3116-4ac1-b4f3-4632e545182f\") "
Dec 02 00:32:32 crc kubenswrapper[4856]: I1202 00:32:32.797062 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5aa08166-3116-4ac1-b4f3-4632e545182f-utilities" (OuterVolumeSpecName: "utilities") pod "5aa08166-3116-4ac1-b4f3-4632e545182f" (UID: "5aa08166-3116-4ac1-b4f3-4632e545182f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 00:32:32 crc kubenswrapper[4856]: I1202 00:32:32.805396 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5aa08166-3116-4ac1-b4f3-4632e545182f-kube-api-access-mhzmr" (OuterVolumeSpecName: "kube-api-access-mhzmr") pod "5aa08166-3116-4ac1-b4f3-4632e545182f" (UID: "5aa08166-3116-4ac1-b4f3-4632e545182f"). InnerVolumeSpecName "kube-api-access-mhzmr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 00:32:32 crc kubenswrapper[4856]: I1202 00:32:32.830170 4856 generic.go:334] "Generic (PLEG): container finished" podID="5aa08166-3116-4ac1-b4f3-4632e545182f" containerID="bd7c11b8ff8f561bfc47be86e253be1811ce13b58af4edb79cdf07ec84b575c6" exitCode=0
Dec 02 00:32:32 crc kubenswrapper[4856]: I1202 00:32:32.830261 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z5ngb" event={"ID":"5aa08166-3116-4ac1-b4f3-4632e545182f","Type":"ContainerDied","Data":"bd7c11b8ff8f561bfc47be86e253be1811ce13b58af4edb79cdf07ec84b575c6"}
Dec 02 00:32:32 crc kubenswrapper[4856]: I1202 00:32:32.830299 4856 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z5ngb" event={"ID":"5aa08166-3116-4ac1-b4f3-4632e545182f","Type":"ContainerDied","Data":"95ce1689dfb619d47d831a06e92b8756332f4709a6b84223005f4b4050ce1866"}
Dec 02 00:32:32 crc kubenswrapper[4856]: I1202 00:32:32.830324 4856 scope.go:117] "RemoveContainer" containerID="bd7c11b8ff8f561bfc47be86e253be1811ce13b58af4edb79cdf07ec84b575c6"
Dec 02 00:32:32 crc kubenswrapper[4856]: I1202 00:32:32.830566 4856 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z5ngb"
Dec 02 00:32:32 crc kubenswrapper[4856]: I1202 00:32:32.857552 4856 scope.go:117] "RemoveContainer" containerID="6a063d734d7eed192658cedb6c85f3c11a52fbfe477bbbb1b338e20eb7ed18d7"
Dec 02 00:32:32 crc kubenswrapper[4856]: I1202 00:32:32.869795 4856 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5aa08166-3116-4ac1-b4f3-4632e545182f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5aa08166-3116-4ac1-b4f3-4632e545182f" (UID: "5aa08166-3116-4ac1-b4f3-4632e545182f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 00:32:32 crc kubenswrapper[4856]: I1202 00:32:32.872631 4856 scope.go:117] "RemoveContainer" containerID="c17c87a6510056742f8f63f247fa5d016b52370c7ea5de77c031988be7de5983"
Dec 02 00:32:32 crc kubenswrapper[4856]: I1202 00:32:32.890814 4856 scope.go:117] "RemoveContainer" containerID="bd7c11b8ff8f561bfc47be86e253be1811ce13b58af4edb79cdf07ec84b575c6"
Dec 02 00:32:32 crc kubenswrapper[4856]: E1202 00:32:32.891341 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd7c11b8ff8f561bfc47be86e253be1811ce13b58af4edb79cdf07ec84b575c6\": container with ID starting with bd7c11b8ff8f561bfc47be86e253be1811ce13b58af4edb79cdf07ec84b575c6 not found: ID does not exist" containerID="bd7c11b8ff8f561bfc47be86e253be1811ce13b58af4edb79cdf07ec84b575c6"
Dec 02 00:32:32 crc kubenswrapper[4856]: I1202 00:32:32.891474 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd7c11b8ff8f561bfc47be86e253be1811ce13b58af4edb79cdf07ec84b575c6"} err="failed to get container status \"bd7c11b8ff8f561bfc47be86e253be1811ce13b58af4edb79cdf07ec84b575c6\": rpc error: code = NotFound desc = could not find container \"bd7c11b8ff8f561bfc47be86e253be1811ce13b58af4edb79cdf07ec84b575c6\": container with ID starting with bd7c11b8ff8f561bfc47be86e253be1811ce13b58af4edb79cdf07ec84b575c6 not found: ID does not exist"
Dec 02 00:32:32 crc kubenswrapper[4856]: I1202 00:32:32.891554 4856 scope.go:117] "RemoveContainer" containerID="6a063d734d7eed192658cedb6c85f3c11a52fbfe477bbbb1b338e20eb7ed18d7"
Dec 02 00:32:32 crc kubenswrapper[4856]: E1202 00:32:32.891988 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a063d734d7eed192658cedb6c85f3c11a52fbfe477bbbb1b338e20eb7ed18d7\": container with ID starting with 6a063d734d7eed192658cedb6c85f3c11a52fbfe477bbbb1b338e20eb7ed18d7 not found: ID does not exist" containerID="6a063d734d7eed192658cedb6c85f3c11a52fbfe477bbbb1b338e20eb7ed18d7"
Dec 02 00:32:32 crc kubenswrapper[4856]: I1202 00:32:32.892042 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a063d734d7eed192658cedb6c85f3c11a52fbfe477bbbb1b338e20eb7ed18d7"} err="failed to get container status \"6a063d734d7eed192658cedb6c85f3c11a52fbfe477bbbb1b338e20eb7ed18d7\": rpc error: code = NotFound desc = could not find container \"6a063d734d7eed192658cedb6c85f3c11a52fbfe477bbbb1b338e20eb7ed18d7\": container with ID starting with 6a063d734d7eed192658cedb6c85f3c11a52fbfe477bbbb1b338e20eb7ed18d7 not found: ID does not exist"
Dec 02 00:32:32 crc kubenswrapper[4856]: I1202 00:32:32.892062 4856 scope.go:117] "RemoveContainer" containerID="c17c87a6510056742f8f63f247fa5d016b52370c7ea5de77c031988be7de5983"
Dec 02 00:32:32 crc kubenswrapper[4856]: E1202 00:32:32.892343 4856 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c17c87a6510056742f8f63f247fa5d016b52370c7ea5de77c031988be7de5983\": container with ID starting with c17c87a6510056742f8f63f247fa5d016b52370c7ea5de77c031988be7de5983 not found: ID does not exist" containerID="c17c87a6510056742f8f63f247fa5d016b52370c7ea5de77c031988be7de5983"
Dec 02 00:32:32 crc kubenswrapper[4856]: I1202 00:32:32.892368 4856 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c17c87a6510056742f8f63f247fa5d016b52370c7ea5de77c031988be7de5983"} err="failed to get container status \"c17c87a6510056742f8f63f247fa5d016b52370c7ea5de77c031988be7de5983\": rpc error: code = NotFound desc = could not find container \"c17c87a6510056742f8f63f247fa5d016b52370c7ea5de77c031988be7de5983\": container with ID starting with c17c87a6510056742f8f63f247fa5d016b52370c7ea5de77c031988be7de5983 not found: ID does not exist"
Dec 02 00:32:32 crc kubenswrapper[4856]: I1202 00:32:32.899519 4856 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5aa08166-3116-4ac1-b4f3-4632e545182f-utilities\") on node \"crc\" DevicePath \"\""
Dec 02 00:32:32 crc kubenswrapper[4856]: I1202 00:32:32.899709 4856 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mhzmr\" (UniqueName: \"kubernetes.io/projected/5aa08166-3116-4ac1-b4f3-4632e545182f-kube-api-access-mhzmr\") on node \"crc\" DevicePath \"\""
Dec 02 00:32:32 crc kubenswrapper[4856]: I1202 00:32:32.899739 4856 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5aa08166-3116-4ac1-b4f3-4632e545182f-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 02 00:32:33 crc kubenswrapper[4856]: I1202 00:32:33.172355 4856 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-z5ngb"]
Dec 02 00:32:33 crc kubenswrapper[4856]: I1202 00:32:33.178958 4856 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-z5ngb"]
Dec 02 00:32:33 crc kubenswrapper[4856]: I1202 00:32:33.262068 4856 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5aa08166-3116-4ac1-b4f3-4632e545182f" path="/var/lib/kubelet/pods/5aa08166-3116-4ac1-b4f3-4632e545182f/volumes"
Dec 02 00:32:47 crc kubenswrapper[4856]: I1202 00:32:47.253076 4856 scope.go:117] "RemoveContainer" containerID="157d719dc098f8c216b760d10c270d947a725276a55e09b4f55e821566cfc9ee"
Dec 02 00:32:47 crc kubenswrapper[4856]: E1202 00:32:47.253786 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-455ww_openshift-machine-config-operator(0271f00d-b420-4dee-aa8b-92d6fc294b2a)\"" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a"
Dec 02 00:33:00 crc kubenswrapper[4856]: I1202 00:33:00.251975 4856 scope.go:117] "RemoveContainer" containerID="157d719dc098f8c216b760d10c270d947a725276a55e09b4f55e821566cfc9ee"
Dec 02 00:33:00 crc kubenswrapper[4856]: E1202 00:33:00.252824 4856 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-455ww_openshift-machine-config-operator(0271f00d-b420-4dee-aa8b-92d6fc294b2a)\"" pod="openshift-machine-config-operator/machine-config-daemon-455ww" podUID="0271f00d-b420-4dee-aa8b-92d6fc294b2a"